[ 526.155361] env[65503]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=65503) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 526.155738] env[65503]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=65503) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 526.155789] env[65503]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=65503) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 526.156136] env[65503]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs [ 526.269337] env[65503]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=65503) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}} [ 526.278534] env[65503]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=65503) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}} [ 526.323389] env[65503]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative [ 526.885690] env[65503]: INFO nova.virt.driver [None req-75e63e3d-faa9-4f8e-9f22-fffe11900df2 None None] Loading compute driver 'vmwareapi.VMwareVCDriver' [ 526.957692] env[65503]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 526.957843] env[65503]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 526.957951] env[65503]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=65503) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}} [ 530.333231] env[65503]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-3e7cec5e-7b0e-40ea-a2c9-d11336629a0e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.350255] env[65503]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=65503) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}} [ 530.350428] env[65503]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-b632a1bc-d3cf-446d-b5a1-ac3daa8015da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.383908] env[65503]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 232c3. 
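The records above show oslo.vmware establishing the vCenter session for nova's VMwareVCDriver: _create_session runs under the "oslo_vmware_api_lock" lock, builds the suds SOAP client against the /sdk endpoint, and logs in via SessionManager.Login. A minimal sketch of driving the same class directly is below; the host, credentials, and tuning values are placeholders, and the argument layout is an assumption to check against the installed oslo.vmware rather than anything taken from this log.

    # Illustrative sketch only -- not nova's code. Host, credentials and the
    # retry/poll values are hypothetical placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.example.test',              # vCenter host (placeholder)
        'administrator@vsphere.local',   # placeholder username
        'secret',                        # placeholder password
        10,                              # api_retry_count
        0.5)                             # task_poll_interval, in seconds

    # The driver reads the vCenter version the same way the log reports it.
    print(session.vim.service_content.about.version)   # e.g. "7.0.3"
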
[ 530.384111] env[65503]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.426s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 530.384691] env[65503]: INFO nova.virt.vmwareapi.driver [None req-75e63e3d-faa9-4f8e-9f22-fffe11900df2 None None] VMware vCenter version: 7.0.3 [ 530.388087] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9938122d-c829-44e7-97aa-680dc4a6301d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.410482] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a06421-cf70-4a35-aa14-901d3dd625a3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.417335] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82724e1f-1190-4ad5-9d30-f85f9965695c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.424860] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59df2b78-e2b9-4db5-905e-31d4569cedd0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.438875] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88072017-a015-4def-967c-494e8c7149d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.446167] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12095557-2b5c-4201-80f4-f1b1a1b7bf55 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.478146] env[65503]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-10264ca5-098b-4c7c-aa2e-a6eaf6668dbb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.484520] env[65503]: DEBUG nova.virt.vmwareapi.driver [None req-75e63e3d-faa9-4f8e-9f22-fffe11900df2 None None] Extension org.openstack.compute already exists. {{(pid=65503) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}} [ 530.487238] env[65503]: INFO nova.compute.provider_config [None req-75e63e3d-faa9-4f8e-9f22-fffe11900df2 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access. 
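The Acquiring lock / acquired / "released" DEBUG records around _create_session above, and around the per-cell cache setup that follows, are emitted by oslo.concurrency's lockutils helpers. A minimal sketch of that pattern, assuming only the public lockutils API (the lock names and the guarded function here are made up):

    # Minimal sketch of the locking pattern behind the DEBUG lines; the lock
    # names and the guarded function are hypothetical.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('example_cell_cache_lock')
    def get_or_set_cached_cell(cell_uuid):
        # Only one green thread at a time rebuilds the cached cell entry;
        # lockutils logs the "acquired :: waited" and "released :: held" times.
        return cell_uuid

    # The same helper is also available as a context manager:
    with lockutils.lock('example_singleton_lock'):
        pass  # critical section
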
[ 530.996086] env[65503]: DEBUG nova.context [None req-75e63e3d-faa9-4f8e-9f22-fffe11900df2 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),e5e1a491-128f-4648-ba14-367ddfa4eff4(cell1) {{(pid=65503) load_cells /opt/stack/nova/nova/context.py:472}} [ 530.996086] env[65503]: INFO nova.utils [None req-75e63e3d-faa9-4f8e-9f22-fffe11900df2 None None] The cell worker thread pool MainProcess.cell_worker is initialized [ 530.996861] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 530.997261] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 530.998083] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 530.998684] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Acquiring lock "e5e1a491-128f-4648-ba14-367ddfa4eff4" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 530.999017] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Lock "e5e1a491-128f-4648-ba14-367ddfa4eff4" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 531.000172] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Lock "e5e1a491-128f-4648-ba14-367ddfa4eff4" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 531.021877] env[65503]: INFO dbcounter [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Registered counter for database nova_cell0 [ 531.031054] env[65503]: INFO dbcounter [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Registered counter for database nova_cell1 [ 531.034247] env[65503]: DEBUG oslo_db.sqlalchemy.engines [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=65503) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}} [ 531.034633] env[65503]: DEBUG oslo_db.sqlalchemy.engines [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] MySQL server mode set to 
STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=65503) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 531.039859] env[65503]: ERROR nova.db.main.api [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl
[ 531.039859] env[65503]: func(*args, **kwargs)
[ 531.039859] env[65503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_green.py", line 69, in __call__
[ 531.039859] env[65503]: self.work.run()
[ 531.039859] env[65503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_utils.py", line 45, in run
[ 531.039859] env[65503]: result = self.fn(*self.args, **self.kwargs)
[ 531.039859] env[65503]: File "/opt/stack/nova/nova/utils.py", line 584, in context_wrapper
[ 531.039859] env[65503]: return func(*args, **kwargs)
[ 531.039859] env[65503]: File "/opt/stack/nova/nova/context.py", line 420, in gather_result
[ 531.039859] env[65503]: result = fn(*args, **kwargs)
[ 531.039859] env[65503]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 531.039859] env[65503]: return f(*args, **kwargs)
[ 531.039859] env[65503]: File "/opt/stack/nova/nova/objects/service.py", line 568, in _db_service_get_minimum_version
[ 531.039859] env[65503]: return db.service_get_minimum_version(context, binaries)
[ 531.039859] env[65503]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 531.039859] env[65503]: _check_db_access()
[ 531.039859] env[65503]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 531.039859] env[65503]: stacktrace = ''.join(traceback.format_stack())
[ 531.039859] env[65503]:
[ 531.040784] env[65503]: ERROR nova.db.main.api [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl
[ 531.040784] env[65503]: func(*args, **kwargs)
[ 531.040784] env[65503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_green.py", line 69, in __call__
[ 531.040784] env[65503]: self.work.run()
[ 531.040784] env[65503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_utils.py", line 45, in run
[ 531.040784] env[65503]: result = self.fn(*self.args, **self.kwargs)
[ 531.040784] env[65503]: File "/opt/stack/nova/nova/utils.py", line 584, in context_wrapper
[ 531.040784] env[65503]: return func(*args, **kwargs)
[ 531.040784] env[65503]: File "/opt/stack/nova/nova/context.py", line 420, in gather_result
[ 531.040784] env[65503]: result = fn(*args, **kwargs)
[ 531.040784] env[65503]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 531.040784] env[65503]: return f(*args, **kwargs)
[ 531.040784] env[65503]: File "/opt/stack/nova/nova/objects/service.py", line 568, in _db_service_get_minimum_version
[ 531.040784] env[65503]: return db.service_get_minimum_version(context, binaries)
[ 531.040784] env[65503]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 531.040784] env[65503]: _check_db_access()
[ 531.040784] env[65503]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 531.040784] env[65503]: stacktrace = ''.join(traceback.format_stack())
[ 531.040784] env[65503]:
[ 531.041588]
env[65503]: WARNING nova.objects.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 531.041588] env[65503]: WARNING nova.objects.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Failed to get minimum service version for cell e5e1a491-128f-4648-ba14-367ddfa4eff4 [ 531.041809] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Acquiring lock "singleton_lock" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 531.041954] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Acquired lock "singleton_lock" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 531.042206] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Releasing lock "singleton_lock" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 531.042521] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Full set of CONF: {{(pid=65503) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/service.py:274}} [ 531.042650] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ******************************************************************************** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 531.042764] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] Configuration options gathered from: {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 531.042885] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 531.043087] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 531.043209] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ================================================================================ {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 531.043407] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] allow_resize_to_same_host = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.043566] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] arq_binding_timeout = 300 {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.043684] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] backdoor_port = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.043798] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] backdoor_socket = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.043949] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] block_device_allocate_retries = 60 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.044114] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] block_device_allocate_retries_interval = 3 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.044265] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cell_worker_thread_pool_size = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.044419] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cert = self.pem {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.044568] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.044723] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute_monitors = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.044872] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] config_dir = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.045035] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] config_drive_format = iso9660 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.045162] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.045322] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] config_source = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.045507] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] console_host = devstack {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 
531.045659] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] control_exchange = nova {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.045803] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cpu_allocation_ratio = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.045951] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] daemon = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.046115] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] debug = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.046264] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] default_access_ip_network_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.046413] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] default_availability_zone = nova {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.046555] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] default_ephemeral_format = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.046698] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] default_green_pool_size = 1000 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.046913] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.047081] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] default_schedule_zone = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.047256] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] default_thread_pool_size = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.047407] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] 
disk_allocation_ratio = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.047554] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] enable_new_services = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.047702] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] flat_injected = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.047848] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] force_config_drive = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.047992] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] force_raw_images = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.048165] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] graceful_shutdown_timeout = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.048339] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] heal_instance_info_cache_interval = -1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.048551] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] host = cpu-1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.048716] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.048870] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] initial_disk_allocation_ratio = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.049026] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] initial_ram_allocation_ratio = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.049278] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.049442] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] instance_build_timeout = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.049592] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] instance_delete_interval = 300 {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.049745] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] instance_format = [instance: %(uuid)s] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.049900] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] instance_name_template = instance-%08x {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.050067] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] instance_usage_audit = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.050246] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] instance_usage_audit_period = month {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.050416] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.050572] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] instances_path = /opt/stack/data/nova/instances {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.050722] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] internal_service_availability_zone = internal {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.050862] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] key = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.051013] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] live_migration_retry_count = 30 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.051180] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] log_color = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.051331] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] log_config_append = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.051483] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.051627] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] log_dir = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.051772] 
env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] log_file = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.051887] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] log_options = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.052053] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] log_rotate_interval = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.052213] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] log_rotate_interval_type = days {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.052365] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] log_rotation_type = none {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.052480] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.052595] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.052749] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.052899] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.053024] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.053179] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] long_rpc_timeout = 1800 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.053326] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] max_concurrent_builds = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.053470] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] max_concurrent_live_migrations = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.053613] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] max_concurrent_snapshots = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.053759] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] max_local_block_devices = 3 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.053903] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] max_logfile_count = 30 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.054056] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] max_logfile_size_mb = 200 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.054203] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] maximum_instance_delete_attempts = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.054381] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] migrate_max_retries = -1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.054556] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] mkisofs_cmd = genisoimage {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.054831] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] my_block_storage_ip = 10.180.1.21 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.054893] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] my_ip = 10.180.1.21 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.055064] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.055219] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] network_allocate_retries = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.055385] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.055538] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None 
None] osapi_compute_unique_server_name_scope = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.055685] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] password_length = 12 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.055832] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] periodic_enable = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.055979] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] periodic_fuzzy_delay = 60 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.056151] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] pointer_model = usbtablet {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.056305] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] preallocate_images = none {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.056452] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] publish_errors = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.056570] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] pybasedir = /opt/stack/nova {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.056712] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ram_allocation_ratio = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.056860] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] rate_limit_burst = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.057048] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] rate_limit_except_level = CRITICAL {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.057252] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] rate_limit_interval = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.057394] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] reboot_timeout = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.057537] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] reclaim_instance_interval = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.057676] env[65503]: 
DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] record = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.057828] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] reimage_timeout_per_gb = 60 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.057980] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] report_interval = 120 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.058144] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] rescue_timeout = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.058295] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] reserved_host_cpus = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.058438] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] reserved_host_disk_mb = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.058579] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] reserved_host_memory_mb = 512 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.058723] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] reserved_huge_pages = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.058864] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] resize_confirm_window = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.059015] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] resize_fs_using_block_device = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.059199] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] resume_guests_state_on_host_boot = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.059362] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.059509] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] rpc_response_timeout = 60 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.059656] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] run_external_periodic_tasks = True {{(pid=65503) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.059836] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] running_deleted_instance_action = reap {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.060039] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] running_deleted_instance_poll_interval = 1800 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.060205] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] running_deleted_instance_timeout = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.060350] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] scheduler_instance_sync_interval = 120 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.060505] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] service_down_time = 720 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.060686] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] servicegroup_driver = db {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.060819] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] shell_completion = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.060942] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] shelved_offload_time = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.061124] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] shelved_poll_interval = 3600 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.061250] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] shutdown_timeout = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.061410] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] source_is_ipv6 = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.061548] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ssl_only = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.061797] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.061953] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] sync_power_state_interval = 600 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.062114] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] sync_power_state_pool_size = 1000 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.062271] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] syslog_log_facility = LOG_USER {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.062412] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] tempdir = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.062556] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] thread_pool_statistic_period = -1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.062699] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] timeout_nbd = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.062852] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] transport_url = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.062996] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] update_resources_interval = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.063154] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] use_cow_images = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.063301] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] use_journal = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.063446] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] use_json = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.063588] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] use_rootwrap_daemon = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.063730] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] use_stderr = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.063871] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] use_syslog = False {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.064018] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vcpu_pin_set = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.064174] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plugging_is_fatal = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.064329] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plugging_timeout = 300 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.064483] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] virt_mkfs = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.064628] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] volume_usage_poll_interval = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.064772] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] watch_log_file = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.064921] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] web = /usr/share/spice-html5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 531.065113] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_concurrency.disable_process_locking = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.065753] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.065941] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_brick.lock_path = /opt/stack/data/os_brick {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.066117] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.066277] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.066473] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.066643] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.066804] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.066961] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.067151] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.067322] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.compute_link_prefix = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.067483] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.067636] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.dhcp_domain = novalocal {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.067788] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.enable_instance_password = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.067935] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.glance_link_prefix = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.068098] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.068261] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.068410] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.instance_list_per_project_cells = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.068556] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.list_records_by_skipping_down_cells = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.068699] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.local_metadata_per_cell = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.068846] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.max_limit = 1000 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.068995] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.metadata_cache_expiration = 15 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.069199] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.neutron_default_project_id = default {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.069361] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.response_validation = warn {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.069512] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.use_neutron_default_nets = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.069660] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.069806] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.069954] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.070125] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.070308] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.vendordata_dynamic_targets = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.070461] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.vendordata_jsonfile_path = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.070624] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.070803] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.backend = dogpile.cache.memcached {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.070958] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.backend_argument = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.071119] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.backend_expiration_time = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.071276] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.config_prefix = cache.oslo {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.071435] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.dead_timeout = 60.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.071583] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.debug_cache_backend = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.071727] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.enable_retry_client = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.071870] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.enable_socket_keepalive = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.072063] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.enabled = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.072200] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.enforce_fips_mode = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.072354] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.expiration_time = 600 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.072502] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.hashclient_retry_attempts = 2 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.072650] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.hashclient_retry_delay = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.072795] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.memcache_dead_retry = 300 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.072936] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.memcache_password = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.073102] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.073255] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.073406] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.memcache_pool_maxsize = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.073556] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.073693] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.memcache_sasl_enabled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.073854] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.074011] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.memcache_socket_timeout = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.074163] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.memcache_username = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.074316] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.proxies = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.074463] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.redis_db = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.074610] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] 
cache.redis_password = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.074765] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.redis_sentinel_service_name = mymaster {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.074924] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.075089] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.redis_server = localhost:6379 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.075248] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.redis_socket_timeout = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.075396] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.redis_username = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.075546] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.retry_attempts = 2 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.075695] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.retry_delay = 0.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.075843] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.socket_keepalive_count = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.075988] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.socket_keepalive_idle = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.076146] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.socket_keepalive_interval = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.076292] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.tls_allowed_ciphers = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.076437] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.tls_cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.076574] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.tls_certfile = None {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.076717] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.tls_enabled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.076858] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cache.tls_keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.077019] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.auth_section = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.077220] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.auth_type = password {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.077376] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.077543] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.catalog_info = volumev3::publicURL {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.077690] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.077838] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.077982] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.cross_az_attach = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.078147] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.debug = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.078294] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.endpoint_template = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.078455] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.http_retries = 3 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.078636] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.078785] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.078945] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.os_region_name = RegionOne {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.079141] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.079302] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cinder.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.079460] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.079607] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.cpu_dedicated_set = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.079750] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.cpu_shared_set = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.079900] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.image_type_exclude_list = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.080084] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.080210] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.max_concurrent_disk_ops = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.080357] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.max_disk_devices_to_attach = -1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.080501] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.080653] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.080800] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.resource_provider_association_refresh = 300 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.080942] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.081098] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.shutdown_retry_interval = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.081267] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.081430] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] conductor.workers = 2 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.081590] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] console.allowed_origins = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.081735] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] console.ssl_ciphers = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.081891] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] console.ssl_minimum_version = default {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.082055] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] consoleauth.enforce_session_timeout = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.082216] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] consoleauth.token_ttl = 600 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.082374] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.082515] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.082661] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.082807] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.connect_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.082949] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.connect_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.083105] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.endpoint_override = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.083254] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.083397] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.083545] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.max_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.083683] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.min_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.083822] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.region_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.083962] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.retriable_status_codes = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.084116] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.service_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.084273] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.service_type = accelerator {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.084419] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.084560] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.status_code_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.084703] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.status_code_retry_delay = None {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.084845] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.085030] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.085172] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] cyborg.version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.085330] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.asyncio_connection = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.085474] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.asyncio_slave_connection = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.085634] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.backend = sqlalchemy {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.085787] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.connection = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.085940] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.connection_debug = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.086108] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.connection_parameters = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.086269] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.connection_recycle_time = 3600 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.086411] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.connection_trace = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.086555] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.db_inc_retry_interval = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.086703] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.db_max_retries = 20 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
531.086849] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.db_max_retry_interval = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.086997] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.db_retry_interval = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.087180] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.max_overflow = 50 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.087337] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.max_pool_size = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.087484] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.max_retries = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.087639] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.087785] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.mysql_wsrep_sync_wait = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.087926] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.pool_timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.088085] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.retry_interval = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.088238] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.slave_connection = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.088389] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.sqlite_synchronous = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.088537] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] database.use_db_reconnect = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.088681] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.asyncio_connection = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.088825] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.asyncio_slave_connection = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.088981] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.backend = sqlalchemy {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.089183] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.connection = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.089344] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.connection_debug = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.089501] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.connection_parameters = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.089651] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.connection_recycle_time = 3600 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.089796] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.connection_trace = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.089942] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.db_inc_retry_interval = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.090116] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.db_max_retries = 20 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.090303] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.db_max_retry_interval = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.090467] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.db_retry_interval = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.090618] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.max_overflow = 50 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.090767] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.max_pool_size = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.090912] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.max_retries = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.091079] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.091231] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.091383] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.pool_timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.091558] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.retry_interval = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.091707] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.slave_connection = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.091853] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] api_database.sqlite_synchronous = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.092024] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] devices.enabled_mdev_types = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.092192] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.092349] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ephemeral_storage_encryption.default_format = luks {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.092496] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ephemeral_storage_encryption.enabled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.092645] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.092800] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.api_servers = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.092947] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.093110] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.093266] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.093691] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.connect_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.093691] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.connect_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.093691] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.debug = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.093832] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.default_trusted_certificate_ids = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.093977] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.enable_certificate_validation = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.094138] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.enable_rbd_download = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.094284] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.endpoint_override = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.094433] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.094576] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.094719] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.max_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.094861] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.min_version = None {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.095017] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.num_retries = 3 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.095177] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.rbd_ceph_conf = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.095327] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.rbd_connect_timeout = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.095482] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.rbd_pool = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.095630] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.rbd_user = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.095774] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.region_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.095916] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.retriable_status_codes = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.096071] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.service_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.096228] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.service_type = image {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.096375] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.096521] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.status_code_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.096840] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.status_code_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.096840] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.096961] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.097151] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.verify_glance_signatures = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.097312] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] glance.version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.097466] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] guestfs.debug = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.097620] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.auth_section = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.097766] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.auth_type = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.097907] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.098061] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.098258] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.098365] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.connect_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.098510] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.connect_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.098649] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.endpoint_override = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.098795] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.098936] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.keyfile = None {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.099118] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.max_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.099272] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.min_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.099418] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.region_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.099562] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.retriable_status_codes = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.099706] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.service_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.099860] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.service_type = shared-file-system {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.100014] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.share_apply_policy_timeout = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.100178] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.100336] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.status_code_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.100480] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.status_code_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.100625] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.100792] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.100937] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] manila.version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.101106] 
env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] mks.enabled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.101463] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.101639] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] image_cache.manager_interval = 2400 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.101794] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] image_cache.precache_concurrency = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.101952] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] image_cache.remove_unused_base_images = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.102128] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.102310] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.102494] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] image_cache.subdirectory_name = _base {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.102658] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.api_max_retries = 60 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.102809] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.api_retry_interval = 2 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.102952] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.auth_section = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.103112] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.auth_type = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.103262] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.103405] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.103553] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.103702] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.conductor_group = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.103848] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.connect_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.104020] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.connect_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.104190] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.endpoint_override = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.104377] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.104599] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.104828] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.max_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.105067] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.min_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.105295] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.peer_list = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.105506] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.region_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.105698] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.retriable_status_codes = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.105880] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.serial_console_state_timeout = 10 
{{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.106047] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.service_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.106211] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.service_type = baremetal {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.106361] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.shard = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.106509] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.106656] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.status_code_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.106800] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.status_code_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.106943] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.107126] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.107278] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ironic.version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.107448] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.107607] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] key_manager.fixed_key = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.107773] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.107918] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.barbican_api_version = None {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.108074] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.barbican_endpoint = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.108235] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.barbican_endpoint_type = public {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.108378] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.barbican_region_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.108520] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.108663] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.108809] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.108951] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.109137] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.109302] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.number_of_retries = 60 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.109454] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.retry_delay = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.109602] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.send_service_user_token = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.109748] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.109891] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.110100] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.verify_ssl = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.110205] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican.verify_ssl_path = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.110370] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican_service_user.auth_section = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.110520] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican_service_user.auth_type = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.110662] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican_service_user.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.110802] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican_service_user.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.110949] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican_service_user.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.111103] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican_service_user.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.111246] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican_service_user.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.111390] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican_service_user.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.111531] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] barbican_service_user.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.111683] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vault.approle_role_id = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.111828] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vault.approle_secret_id = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.111982] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vault.kv_mountpoint = secret {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.112140] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vault.kv_path = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.112293] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vault.kv_version = 2 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.112439] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vault.namespace = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.112580] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vault.root_token_id = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.112721] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vault.ssl_ca_crt_file = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.112873] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vault.timeout = 60.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.113044] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vault.use_ssl = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.113207] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.113363] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.113509] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.113656] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.113800] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.connect_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.113945] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.connect_retry_delay = None {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.114100] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.endpoint_override = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.114252] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.114428] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.114582] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.max_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.114726] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.min_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.114871] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.region_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.115028] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.retriable_status_codes = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.115180] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.service_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.115340] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.service_type = identity {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.115487] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.115634] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.status_code_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.115782] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.status_code_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.115927] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.116105] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.116259] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] keystone.version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.116434] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.ceph_mount_options = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.116746] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.116909] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.connection_uri = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.117071] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.cpu_mode = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.117231] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.cpu_model_extra_flags = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.117383] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.cpu_models = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.117538] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.cpu_power_governor_high = performance {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.117691] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.cpu_power_governor_low = powersave {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.117840] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.cpu_power_management = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.117997] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.118164] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.device_detach_attempts = 8 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.118314] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.device_detach_timeout = 20 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.118465] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.disk_cachemodes = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.118608] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.disk_prefix = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.118758] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.enabled_perf_events = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.118912] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.file_backed_memory = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.119098] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.gid_maps = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.119260] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.hw_disk_discard = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.119408] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.hw_machine_type = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.119565] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.images_rbd_ceph_conf = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.119713] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.119861] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.120024] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.images_rbd_glance_store_name = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.120197] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.images_rbd_pool = rbd {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.120369] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.images_type = default {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.120517] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.images_volume_group = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.120664] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.inject_key = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.120812] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.inject_partition = -2 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.120963] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.inject_password = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.121122] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.iscsi_iface = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.121275] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.iser_use_multipath = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.121427] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_bandwidth = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.121574] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.121722] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_downtime = 500 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.121872] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.122030] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.122183] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_inbound_addr = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.122336] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 
None None] libvirt.live_migration_parallel_connections = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.122481] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.122626] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_permit_post_copy = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.122770] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_scheme = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.122924] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_timeout_action = abort {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.123084] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_tunnelled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.123235] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_uri = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.123382] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.live_migration_with_native_tls = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.123526] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.max_queues = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.123673] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.123889] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.124049] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.nfs_mount_options = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.124341] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.124505] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.124658] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.num_iser_scan_tries = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.124805] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.num_memory_encrypted_guests = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.124953] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.125119] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.num_pcie_ports = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.125271] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.num_volume_scan_tries = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.125422] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.pmem_namespaces = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.125566] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.quobyte_client_cfg = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.125869] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.126042] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.rbd_connect_timeout = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.126199] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.126365] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.126538] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.rbd_secret_uuid = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.126687] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.rbd_user = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.126839] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.126997] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.remote_filesystem_transport = ssh {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.127160] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.rescue_image_id = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.127309] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.rescue_kernel_id = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.127455] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.rescue_ramdisk_id = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.127609] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.127755] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.rx_queue_size = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.127909] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.smbfs_mount_options = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.128189] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.128360] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.snapshot_compression = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.128509] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.snapshot_image_format = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.128723] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.128876] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.sparse_logical_volumes = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.129033] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.swtpm_enabled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.129219] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.swtpm_group = tss {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.129375] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.swtpm_user = tss {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.129528] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.sysinfo_serial = unique {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.129673] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.tb_cache_size = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.129814] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.tx_queue_size = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.129962] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.uid_maps = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.130123] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.use_virtio_for_bridges = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.130300] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.virt_type = kvm {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.130458] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.volume_clear = zero {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.130607] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.volume_clear_size = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.130755] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.volume_enforce_multipath = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.130906] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.volume_use_multipath = False {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.131061] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.vzstorage_cache_path = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.131221] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.131375] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.vzstorage_mount_group = qemu {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.131533] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.vzstorage_mount_opts = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.131686] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.131962] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.132139] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.vzstorage_mount_user = stack {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.132297] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.132455] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.auth_section = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.132612] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.auth_type = password {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.132755] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.132896] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.133052] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.collect_timing = False {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.133198] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.connect_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.133340] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.connect_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.133491] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.default_floating_pool = public {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.133634] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.endpoint_override = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.133778] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.extension_sync_interval = 600 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.133919] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.http_retries = 3 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.134072] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.134217] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.134354] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.max_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.134503] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.134642] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.min_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.134791] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.ovs_bridge = br-int {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.134933] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.physnets = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.135104] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.region_name = RegionOne {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.135251] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.retriable_status_codes = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.135404] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.service_metadata_proxy = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.135548] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.service_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.135698] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.service_type = network {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.135842] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.135986] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.status_code_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.136142] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.status_code_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.136283] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.136444] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.136587] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] neutron.version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.136742] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] notifications.bdms_in_notifications = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.136901] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] notifications.default_level = INFO {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.137060] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] notifications.include_share_mapping = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.137226] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] notifications.notification_format = unversioned {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.137374] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] notifications.notify_on_state_change = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.137536] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.137699] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] pci.alias = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.137854] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] pci.device_spec = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.138007] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] pci.report_in_placement = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.138175] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.auth_section = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.138332] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.auth_type = password {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.138517] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.138674] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.138819] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.138968] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.139160] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 
None None] placement.connect_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.139315] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.connect_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.139462] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.default_domain_id = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.139603] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.default_domain_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.139743] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.domain_id = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.139882] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.domain_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.140036] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.endpoint_override = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.140186] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.140328] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.140468] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.max_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.140605] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.min_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.140754] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.password = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.140897] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.project_domain_id = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.141058] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.project_domain_name = Default {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.141214] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.project_id = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.141372] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.project_name = service {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.141525] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.region_name = RegionOne {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.141669] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.retriable_status_codes = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.141815] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.service_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.141963] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.service_type = placement {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.142120] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.142265] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.status_code_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.142405] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.status_code_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.142545] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.system_scope = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.142685] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.142823] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.trust_id = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.142962] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.user_domain_id = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.143127] 
env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.user_domain_name = Default {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.143272] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.user_id = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.143427] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.username = nova {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.143588] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.143729] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] placement.version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.143895] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.cores = 20 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.144062] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.count_usage_from_placement = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.144231] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.144383] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.injected_file_content_bytes = 10240 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.144533] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.injected_file_path_length = 255 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.144681] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.injected_files = 5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.144828] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.instances = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.144974] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.key_pairs = 100 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.145137] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.metadata_items = 128 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.145291] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.ram = 51200 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.145438] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.recheck_quota = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.145590] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.server_group_members = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.145738] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.server_groups = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.145936] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.146108] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] quota.unified_limits_resource_strategy = require {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.146270] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.146418] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.146563] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] scheduler.image_metadata_prefilter = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.146708] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.146857] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] scheduler.max_attempts = 3 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.147007] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] scheduler.max_placement_results = 1000 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.147165] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.147312] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] scheduler.query_placement_for_image_type_support = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.147456] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.147614] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] scheduler.workers = 2 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.147770] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.147924] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.148099] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.148260] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.148412] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.148564] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.148716] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.148893] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.149062] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.host_subset_size = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.149223] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.149372] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.149521] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.149671] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.149818] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.149968] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.isolated_hosts = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.150134] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.isolated_images = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.150329] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.150489] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.150654] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.150805] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.pci_in_placement = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.150952] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.151116] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.151267] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.151412] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.151564] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.151712] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.151861] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.track_instance_changes = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.152034] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.152198] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] metrics.required = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.152352] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] metrics.weight_multiplier = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.152503] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.152656] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] metrics.weight_setting = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.152973] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.153148] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] serial_console.enabled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.153327] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] serial_console.port_range = 10000:20000 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.153488] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.153643] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.153799] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] serial_console.serialproxy_port = 6083 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.153952] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] service_user.auth_section = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.154124] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] service_user.auth_type = password {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.154277] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] service_user.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.154423] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] service_user.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.154569] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] service_user.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.154712] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] service_user.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.154854] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] service_user.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.155015] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] service_user.send_service_user_token = True {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.155171] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] service_user.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.155316] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] service_user.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.155469] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.agent_enabled = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.155618] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.enabled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.155922] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.156129] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.156294] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.html5proxy_port = 6082 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.156441] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.image_compression = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.156586] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.jpeg_compression = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.156726] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.playback_compression = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.156871] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.require_secure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.157034] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.server_listen = 127.0.0.1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.157196] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.157469] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.157623] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.streaming_mode = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.157770] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] spice.zlib_compression = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.157924] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] upgrade_levels.baseapi = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.158093] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] upgrade_levels.compute = auto {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.158246] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] upgrade_levels.conductor = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.158391] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] upgrade_levels.scheduler = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.158543] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vendordata_dynamic_auth.auth_section = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.158689] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vendordata_dynamic_auth.auth_type = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.158832] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vendordata_dynamic_auth.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.158974] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vendordata_dynamic_auth.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.159175] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.159333] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vendordata_dynamic_auth.insecure = False {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.159480] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vendordata_dynamic_auth.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.159628] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.159773] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vendordata_dynamic_auth.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.159933] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.api_retry_count = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.160113] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.ca_file = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.160353] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.cache_prefix = devstack-image-cache {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.160539] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.cluster_name = testcl1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.160697] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.connection_pool_size = 10 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.160845] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.console_delay_seconds = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.160998] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.datastore_regex = ^datastore.* {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.161218] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.161394] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.host_password = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.161550] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.host_port = 443 {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.161704] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.host_username = administrator@vsphere.local {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.161856] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.insecure = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.162007] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.integration_bridge = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.162170] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.maximum_objects = 100 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.162317] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.pbm_default_policy = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.162460] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.pbm_enabled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.162602] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.pbm_wsdl_location = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.162754] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.162895] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.serial_port_proxy_uri = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.163045] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.serial_port_service_uri = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.163200] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.task_poll_interval = 0.5 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.163356] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.use_linked_clone = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.163507] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.vnc_keymap = en-us {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
531.163656] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.vnc_port = 5900 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.163805] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vmware.vnc_port_total = 10000 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.163975] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vnc.auth_schemes = ['none'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.164148] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vnc.enabled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.164435] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.164602] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.164759] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vnc.novncproxy_port = 6080 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.164939] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vnc.server_listen = 127.0.0.1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.165117] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.165269] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vnc.vencrypt_ca_certs = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.165413] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vnc.vencrypt_client_cert = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.165556] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vnc.vencrypt_client_key = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.165714] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.165861] env[65503]: DEBUG oslo_service.backend._eventlet.service 
[None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.disable_deep_image_inspection = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.166010] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.166166] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.166314] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.166456] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.disable_rootwrap = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.166602] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.enable_numa_live_migration = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.166746] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.166891] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.167044] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.167194] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.libvirt_disable_apic = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.167341] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.167489] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.167634] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.167778] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.167923] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.168078] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.168228] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.168374] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.168512] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.168660] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.168826] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.168974] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] wsgi.secure_proxy_ssl_header = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.169162] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] zvm.ca_file = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.169312] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] zvm.cloud_connector_url = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.169606] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.169767] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] zvm.reachable_timeout = 300 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.169925] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.170134] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.170299] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler.connection_string = messaging:// {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.170459] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler.enabled = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.170618] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler.es_doc_type = notification {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.170767] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler.es_scroll_size = 10000 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.170919] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler.es_scroll_time = 2m {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.171079] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler.filter_error_trace = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.171240] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler.hmac_keys = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.171391] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler.sentinel_service_name = mymaster {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.171540] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler.socket_timeout = 0.1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.171686] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler.trace_requests = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.171832] env[65503]: DEBUG oslo_service.backend._eventlet.service [None 
req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler.trace_sqlalchemy = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.171994] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler_jaeger.process_tags = {} {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.172155] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler_jaeger.service_name_prefix = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.172332] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] profiler_otlp.service_name_prefix = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.172569] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.172669] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.172821] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.172968] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.173133] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.173283] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.173432] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.173579] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.173725] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.173881] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.174037] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.174196] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.174349] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.174500] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.174647] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.174800] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.174948] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.175108] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.175269] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.175417] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.175566] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.175716] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.175863] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.176020] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.176174] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.176325] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.176471] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.176619] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.176763] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.176910] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.177090] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.ssl = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.177242] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.177395] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.177542] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=65503) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.177694] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.177845] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.ssl_version = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.177990] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.178181] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.178337] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_notifications.retry = -1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.178497] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.178652] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_messaging_notifications.transport_url = **** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.178809] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.auth_section = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.178957] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.auth_type = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.179139] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.cafile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.179298] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.certfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.179450] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.collect_timing = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.179593] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.connect_retries = None {{(pid=65503) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.179735] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.connect_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.179878] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.endpoint_id = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.180048] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.endpoint_interface = publicURL {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.180235] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.endpoint_override = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.180398] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.endpoint_region_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.180545] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.endpoint_service_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.180690] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.endpoint_service_type = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.180836] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.insecure = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.180979] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.keyfile = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.181137] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.max_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.181282] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.min_version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.181424] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.region_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.181569] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.retriable_status_codes = None {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.181712] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.service_name = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.181855] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.service_type = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.182006] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.split_loggers = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.182185] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.status_code_retries = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.182336] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.status_code_retry_delay = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.182483] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.timeout = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.182629] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.valid_interfaces = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.182773] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_limit.version = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.182926] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_reports.file_event_handler = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.191870] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.191870] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] oslo_reports.log_dir = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.191870] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.191870] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.191870] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.191870] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_linux_bridge_privileged.log_daemon_traceback = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192545] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192545] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192545] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192545] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192545] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_ovs_privileged.group = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192545] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192801] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_ovs_privileged.log_daemon_traceback = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192801] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192801] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192801] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] vif_plug_ovs_privileged.user = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192801] env[65503]: DEBUG 
oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_linux_bridge.flat_interface = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.192801] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193070] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193070] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193070] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193070] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193070] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193070] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193322] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193322] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_ovs.isolate_vif = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193322] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193322] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193322] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193322] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_ovs.ovsdb_interface = native {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193322] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] os_vif_ovs.per_port_bridge = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193608] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] privsep_osbrick.capabilities = [21, 2] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193608] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] privsep_osbrick.group = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193608] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] privsep_osbrick.helper_command = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193608] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] privsep_osbrick.log_daemon_traceback = False {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193608] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193608] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193608] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] privsep_osbrick.user = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193922] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193922] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] nova_sys_admin.group = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193922] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] nova_sys_admin.helper_command = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193922] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] nova_sys_admin.log_daemon_traceback = False {{(pid=65503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193922] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193922] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.193922] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] nova_sys_admin.user = None {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 531.194229] env[65503]: DEBUG oslo_service.backend._eventlet.service [None req-fae96168-3cb0-4fab-8703-82572a7871a5 None None] ******************************************************************************** {{(pid=65503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 531.194229] env[65503]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 531.694044] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Getting list of instances from cluster (obj){ [ 531.694044] env[65503]: value = "domain-c8" [ 531.694044] env[65503]: _type = "ClusterComputeResource" [ 531.694044] env[65503]: } {{(pid=65503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 531.694972] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb3a76d-36ef-4d7e-902e-d33a162d7286 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.704419] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Got total of 0 instances {{(pid=65503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 531.705075] env[65503]: WARNING nova.virt.vmwareapi.driver [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 531.705636] env[65503]: INFO nova.virt.node [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Generated node identity 988ff85a-1d12-41bb-a369-e298e8491ca1 [ 531.705839] env[65503]: INFO nova.virt.node [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Wrote node identity 988ff85a-1d12-41bb-a369-e298e8491ca1 to /opt/stack/data/n-cpu-1/compute_id [ 532.209465] env[65503]: WARNING nova.compute.manager [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Compute nodes ['988ff85a-1d12-41bb-a369-e298e8491ca1'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. 
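Annotation: the entries above show the service generating a node identity (988ff85a-1d12-41bb-a369-e298e8491ca1) and persisting it to /opt/stack/data/n-cpu-1/compute_id before the first resource-tracker run. Below is a minimal sketch of that persist-or-reuse pattern, assuming only a writable state directory; it is an illustration of the idea, not Nova's actual nova.virt.node code.

# Hypothetical sketch of persisting a stable per-node identity, mirroring the
# compute_id file seen in the log above. Path and helper name are illustrative.
import os
import uuid

def get_or_create_node_identity(state_dir: str) -> str:
    """Return a stable node UUID, creating and persisting it on first start."""
    path = os.path.join(state_dir, "compute_id")
    if os.path.exists(path):
        with open(path, encoding="ascii") as f:
            return f.read().strip()
    node_id = str(uuid.uuid4())
    with open(path, "w", encoding="ascii") as f:
        f.write(node_id)
    return node_id

if __name__ == "__main__":
    print(get_or_create_node_identity("/opt/stack/data/n-cpu-1"))

On later restarts the same UUID is read back, which is why the "first time this service is starting on this host" warnings in this log are expected only on first boot.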
[ 533.215370] env[65503]: INFO nova.compute.manager [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 534.221087] env[65503]: WARNING nova.compute.manager [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 534.221482] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 534.221970] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 534.222162] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 534.222362] env[65503]: DEBUG nova.compute.resource_tracker [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 534.223240] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01901075-a072-4965-b7be-9cf9e3da0de0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.232213] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f18260-406f-4d64-ac29-60c4f221bfc8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.248371] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fcebc0-9f8a-47f1-933e-e332ac1fe0af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.258484] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6bcf51-ab84-46e1-870e-690cc28cf6e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.290051] env[65503]: DEBUG nova.compute.resource_tracker [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180700MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 534.290051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 534.290051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 534.792118] env[65503]: WARNING nova.compute.resource_tracker [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] No compute node record for cpu-1:988ff85a-1d12-41bb-a369-e298e8491ca1: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 988ff85a-1d12-41bb-a369-e298e8491ca1 could not be found. [ 535.296072] env[65503]: INFO nova.compute.resource_tracker [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 988ff85a-1d12-41bb-a369-e298e8491ca1 [ 536.805273] env[65503]: DEBUG nova.compute.resource_tracker [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 536.805711] env[65503]: DEBUG nova.compute.resource_tracker [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] stats={'failed_builds': '0'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 536.971088] env[65503]: INFO nova.scheduler.client.report [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] [req-e9c4c22e-fa94-4c2f-a287-12971c8ff4c2] Created resource provider record via placement API for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 536.990497] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f58505c-b006-41db-9d0d-12bb3ef079bc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.998401] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504a76ab-2c5e-4132-b4fb-0941c783324a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.029697] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6be5d3-d43e-48c7-a27d-627937c0836b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.038077] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d7608e-00d2-445f-b5a8-3545483244c1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.052143] env[65503]: DEBUG nova.compute.provider_tree [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 537.598264] env[65503]: DEBUG nova.scheduler.client.report [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 537.598489] env[65503]: DEBUG nova.compute.provider_tree [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 0 to 1 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 537.598609] env[65503]: DEBUG nova.compute.provider_tree [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 537.654503] env[65503]: DEBUG nova.compute.provider_tree [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Updating resource 
provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 1 to 2 during operation: update_traits {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 538.159545] env[65503]: DEBUG nova.compute.resource_tracker [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 538.159928] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.873s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 538.159928] env[65503]: DEBUG nova.service [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Creating RPC server for service compute {{(pid=65503) start /opt/stack/nova/nova/service.py:177}} [ 538.174867] env[65503]: DEBUG nova.service [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] Join ServiceGroup membership for this service compute {{(pid=65503) start /opt/stack/nova/nova/service.py:194}} [ 538.175033] env[65503]: DEBUG nova.servicegroup.drivers.db [None req-8ae9c05d-2ba7-47ce-99bc-2540637db6e2 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=65503) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 559.179585] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._sync_power_states {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.682785] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Getting list of instances from cluster (obj){ [ 559.682785] env[65503]: value = "domain-c8" [ 559.682785] env[65503]: _type = "ClusterComputeResource" [ 559.682785] env[65503]: } {{(pid=65503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 559.683967] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5b7bfa-9164-4653-a74b-36ddd54e8c74 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.693312] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Got total of 0 instances {{(pid=65503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 559.693524] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.693827] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Getting list of instances from cluster (obj){ [ 559.693827] env[65503]: value = "domain-c8" [ 559.693827] env[65503]: _type = "ClusterComputeResource" [ 559.693827] env[65503]: } {{(pid=65503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 559.694655] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79560cb3-ad7b-41de-bbc5-d486b1819106 {{(pid=65503) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.702420] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Got total of 0 instances {{(pid=65503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 580.475882] env[65503]: INFO nova.utils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The default thread pool MainProcess.default is initialized [ 580.477264] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "39266117-e82e-48ae-932a-be04b1a7351a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 580.477741] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "39266117-e82e-48ae-932a-be04b1a7351a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 580.983844] env[65503]: DEBUG nova.compute.manager [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 581.522097] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.522810] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.523877] env[65503]: INFO nova.compute.claims [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.988316] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquiring lock "c6aecf44-9a23-47a2-b1aa-6530b4119b1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.988387] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 
tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lock "c6aecf44-9a23-47a2-b1aa-6530b4119b1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 582.494146] env[65503]: DEBUG nova.compute.manager [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 582.605508] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a23ac90-0523-4ce8-bf73-cd81cc6145ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.616696] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf5dc9d-3704-4129-a6bc-d5183f10e335 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.651576] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b9c368-4a8c-463e-8b69-0030efacf025 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.661962] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee30acce-f26b-42f4-8ec8-f4d0756408f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.679519] env[65503]: DEBUG nova.compute.provider_tree [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.055983] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.185949] env[65503]: DEBUG nova.scheduler.client.report [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 583.388295] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 
tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.388506] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.697204] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.175s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 583.698079] env[65503]: DEBUG nova.compute.manager [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 583.703205] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.647s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.704573] env[65503]: INFO nova.compute.claims [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 583.712443] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquiring lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.712443] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.891862] env[65503]: DEBUG nova.compute.manager [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] 
Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 584.206815] env[65503]: DEBUG nova.compute.utils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 584.208583] env[65503]: DEBUG nova.compute.manager [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 584.208770] env[65503]: DEBUG nova.network.neutron [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 584.212722] env[65503]: WARNING neutronclient.v2_0.client [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 584.212722] env[65503]: WARNING neutronclient.v2_0.client [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 584.212866] env[65503]: WARNING openstack [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 584.214257] env[65503]: WARNING openstack [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 584.231660] env[65503]: DEBUG nova.compute.manager [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 584.423242] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.719722] env[65503]: DEBUG nova.compute.manager [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 584.756287] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.817408] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a15578-0ecf-4b41-a669-267c0187fad9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.826047] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640cd73b-0caa-4aa8-b8f8-3650b1922be9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.849425] env[65503]: DEBUG nova.policy [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4708fca766f447daa757dbf855ff7d89', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '771d20568f55445088cc06737c184615', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 584.885685] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa0d481-1975-41f5-9df3-1eb96a52080d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.900232] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b8f54b-fe6a-4895-966d-2d7aa92c8f25 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.915294] env[65503]: DEBUG nova.compute.provider_tree [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.370157] env[65503]: WARNING openstack [None 
req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 585.370157] env[65503]: WARNING openstack [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 585.419501] env[65503]: DEBUG nova.scheduler.client.report [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 585.519112] env[65503]: DEBUG nova.network.neutron [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Successfully created port: 03697e78-5935-45aa-a1d5-1bf8701e3f56 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 585.734446] env[65503]: DEBUG nova.compute.manager [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 585.776222] env[65503]: DEBUG nova.virt.hardware [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 585.776466] env[65503]: DEBUG nova.virt.hardware [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 585.776616] env[65503]: DEBUG nova.virt.hardware [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 585.776791] env[65503]: DEBUG nova.virt.hardware [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 585.776932] env[65503]: DEBUG nova.virt.hardware [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 585.779429] env[65503]: DEBUG nova.virt.hardware [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 585.779429] env[65503]: DEBUG nova.virt.hardware [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 585.780372] env[65503]: DEBUG nova.virt.hardware [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 585.780677] env[65503]: DEBUG nova.virt.hardware [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a 
tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 585.780851] env[65503]: DEBUG nova.virt.hardware [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 585.781075] env[65503]: DEBUG nova.virt.hardware [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 585.782122] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5193792d-6476-42a4-8a5d-46c831e8045d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.799710] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c5ae4b-803c-48fb-b0d4-6c2b6e06aa6c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.821081] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273eac0b-5ec9-45d5-b183-c01973b98ebf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.927747] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.224s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 585.927747] env[65503]: DEBUG nova.compute.manager [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 585.932490] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.510s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 585.934956] env[65503]: INFO nova.compute.claims [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.287498] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "ad85eef0-cef7-4900-b193-1737a6c2f17b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.287720] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "ad85eef0-cef7-4900-b193-1737a6c2f17b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.335243] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.335243] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.335243] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.335243] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.335243] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.337981] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.338856] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.338856] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 586.338856] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.444209] env[65503]: DEBUG nova.compute.utils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 586.445428] env[65503]: DEBUG nova.compute.manager [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Not allocating networking since 'none' was specified. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 586.792256] env[65503]: DEBUG nova.compute.manager [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 586.845118] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.952259] env[65503]: DEBUG nova.compute.manager [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 587.117134] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341ab1c1-6606-45fe-87cf-6f969b76712b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.129735] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2777de-aa93-405e-b53c-3166649f1646 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.172240] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da839ca-84fd-4cfd-ac42-7c1df76d2c4d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.178028] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquiring lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.178521] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 587.185306] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0705566f-79d8-4785-a368-c89055c58429 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.202017] env[65503]: DEBUG nova.compute.provider_tree [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.325319] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.378985] env[65503]: DEBUG nova.network.neutron [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Successfully updated port: 03697e78-5935-45aa-a1d5-1bf8701e3f56 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 587.680950] env[65503]: DEBUG nova.compute.manager [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: 
f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 587.711329] env[65503]: DEBUG nova.scheduler.client.report [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 587.883599] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.883879] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 587.884079] env[65503]: DEBUG nova.network.neutron [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 587.964167] env[65503]: DEBUG nova.compute.manager [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 588.007864] env[65503]: DEBUG nova.virt.hardware [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 588.008398] env[65503]: DEBUG nova.virt.hardware [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 588.010045] env[65503]: DEBUG nova.virt.hardware [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 588.010045] env[65503]: DEBUG nova.virt.hardware [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 588.010045] env[65503]: DEBUG nova.virt.hardware [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 588.010045] env[65503]: DEBUG nova.virt.hardware [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 588.010045] env[65503]: DEBUG nova.virt.hardware [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 588.010283] env[65503]: DEBUG nova.virt.hardware [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 588.010283] env[65503]: DEBUG nova.virt.hardware [None 
req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 588.010283] env[65503]: DEBUG nova.virt.hardware [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 588.010283] env[65503]: DEBUG nova.virt.hardware [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 588.012369] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0185db92-1c45-4831-a83b-c89da01892ae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.026240] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56fb6bb-618e-4d10-ba34-7bb8296a8054 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.046481] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 588.056516] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 588.057530] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d840cf1-4430-44f7-8b27-ca61b5020e04 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.073965] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Created folder: OpenStack in parent group-v4. [ 588.074227] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Creating folder: Project (b6fd6581cea7434caff33b305a914d0d). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 588.074600] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5880ccdb-f5e8-4631-a06c-82880a720bd0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.090704] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Created folder: Project (b6fd6581cea7434caff33b305a914d0d) in parent group-v870190. [ 588.091084] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Creating folder: Instances. Parent ref: group-v870191. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 588.091184] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c4763ed-f98c-4260-8579-c9025753457f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.100834] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Created folder: Instances in parent group-v870191. [ 588.101104] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 588.101325] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 588.101534] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b2531a5-db63-473c-aea4-5566c3d094de {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.122020] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquiring lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.122020] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 588.123014] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 588.123014] env[65503]: value = "task-4449338" [ 588.123014] env[65503]: _type = "Task" [ 588.123014] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.136205] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449338, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.217819] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.285s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 588.218439] env[65503]: DEBUG nova.compute.manager [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 588.222195] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.222465] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.466s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 588.227507] env[65503]: INFO nova.compute.claims [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 588.397658] env[65503]: WARNING openstack [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 588.399449] env[65503]: WARNING openstack [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 588.473903] env[65503]: DEBUG nova.network.neutron [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 588.532563] env[65503]: WARNING openstack [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 588.532563] env[65503]: WARNING openstack [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 588.632989] env[65503]: DEBUG nova.compute.manager [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 588.650963] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449338, 'name': CreateVM_Task, 'duration_secs': 0.421772} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.651426] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 588.652529] env[65503]: DEBUG oslo_vmware.service [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ece25b1-1ed4-4e13-bd7c-2c9f809ee2b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.659258] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.659424] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 588.660121] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 588.660575] env[65503]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bb62d37-c809-41e8-807e-78de85b30247 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.666294] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 588.666294] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5211be92-1c79-f5f7-73aa-03b3e0e2eefd" [ 588.666294] env[65503]: _type = "Task" [ 588.666294] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.675635] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5211be92-1c79-f5f7-73aa-03b3e0e2eefd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.736166] env[65503]: DEBUG nova.compute.utils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 588.739478] env[65503]: DEBUG nova.compute.manager [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 588.739760] env[65503]: DEBUG nova.network.neutron [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 588.741155] env[65503]: WARNING neutronclient.v2_0.client [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 588.741408] env[65503]: WARNING neutronclient.v2_0.client [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 588.741967] env[65503]: WARNING openstack [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 588.742340] env[65503]: WARNING openstack [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 588.754652] env[65503]: WARNING neutronclient.v2_0.client [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 588.755360] env[65503]: WARNING openstack [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 588.755875] env[65503]: WARNING openstack [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 588.943082] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe156db2-c1bc-459a-ad3e-647a2297c84a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.952428] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba578dc7-7653-4d03-9ab6-ee65c6c77bd7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.986182] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e022db0-b253-4585-b257-0e4303c9de63 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.996762] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dded9c16-d002-48c8-8209-aa4a2c78f588 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.012794] env[65503]: DEBUG nova.compute.provider_tree [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Inventory has not changed in ProviderTree for 
provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.084196] env[65503]: DEBUG nova.network.neutron [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance_info_cache with network_info: [{"id": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "address": "fa:16:3e:34:a3:37", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03697e78-59", "ovs_interfaceid": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 589.091521] env[65503]: DEBUG nova.policy [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a470b09e462b4f2da9f55853d8cc0ff8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f1a7c2c68a498c8ffa173e9778d59d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 589.156227] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 589.177766] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 589.178020] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 589.178262] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.178402] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 589.179262] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 589.179262] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aba572fe-17ce-4cc7-93ad-5d2e68a7a28e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.198229] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 589.198512] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 589.199367] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c355e11f-f7cb-46d6-8819-3197ab38245e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.208939] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-580892fd-727f-49b8-85a8-c1ded0d87b21 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.215051] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 589.215051] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52297cc6-4a17-89fd-2385-6092fa548605" [ 589.215051] env[65503]: _type = "Task" [ 589.215051] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.224967] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52297cc6-4a17-89fd-2385-6092fa548605, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.254320] env[65503]: DEBUG nova.compute.manager [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 589.436780] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "1e92795e-cf30-4175-9e31-c29278f3e9e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 589.438188] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "1e92795e-cf30-4175-9e31-c29278f3e9e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 589.480948] env[65503]: DEBUG nova.network.neutron [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Successfully created port: c6e9e5bd-a5fa-4b57-9707-33c2c2106702 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 589.516776] env[65503]: DEBUG nova.scheduler.client.report [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 589.587584] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 589.587943] env[65503]: DEBUG nova.compute.manager [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Instance network_info: |[{"id": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "address": "fa:16:3e:34:a3:37", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03697e78-59", "ovs_interfaceid": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 589.588495] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:a3:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '03697e78-5935-45aa-a1d5-1bf8701e3f56', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 589.599507] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Creating folder: Project (771d20568f55445088cc06737c184615). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 589.599507] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c46c3f00-82fb-4c92-b949-5e3955ffffc9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.615106] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Created folder: Project (771d20568f55445088cc06737c184615) in parent group-v870190. [ 589.615425] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Creating folder: Instances. Parent ref: group-v870194. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 589.620531] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93539733-b3cd-426f-9753-af1c80bd840f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.624536] env[65503]: DEBUG nova.compute.manager [req-a37adfe8-12a6-4865-ab5e-006022a16fcb req-ea1a7ace-0b6a-4dc7-98ea-0c9c465b6773 service nova] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Received event network-vif-plugged-03697e78-5935-45aa-a1d5-1bf8701e3f56 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 589.624762] env[65503]: DEBUG oslo_concurrency.lockutils [req-a37adfe8-12a6-4865-ab5e-006022a16fcb req-ea1a7ace-0b6a-4dc7-98ea-0c9c465b6773 service nova] Acquiring lock "39266117-e82e-48ae-932a-be04b1a7351a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 589.624953] env[65503]: DEBUG oslo_concurrency.lockutils [req-a37adfe8-12a6-4865-ab5e-006022a16fcb req-ea1a7ace-0b6a-4dc7-98ea-0c9c465b6773 service nova] Lock "39266117-e82e-48ae-932a-be04b1a7351a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 589.625117] env[65503]: DEBUG oslo_concurrency.lockutils [req-a37adfe8-12a6-4865-ab5e-006022a16fcb req-ea1a7ace-0b6a-4dc7-98ea-0c9c465b6773 service nova] Lock "39266117-e82e-48ae-932a-be04b1a7351a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 589.625294] env[65503]: DEBUG nova.compute.manager [req-a37adfe8-12a6-4865-ab5e-006022a16fcb req-ea1a7ace-0b6a-4dc7-98ea-0c9c465b6773 service nova] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] No waiting events found dispatching network-vif-plugged-03697e78-5935-45aa-a1d5-1bf8701e3f56 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 589.625478] env[65503]: WARNING nova.compute.manager [req-a37adfe8-12a6-4865-ab5e-006022a16fcb req-ea1a7ace-0b6a-4dc7-98ea-0c9c465b6773 service nova] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Received unexpected event network-vif-plugged-03697e78-5935-45aa-a1d5-1bf8701e3f56 for instance with vm_state building and task_state spawning. [ 589.637567] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Created folder: Instances in parent group-v870194. [ 589.637748] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 589.637986] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 589.638241] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce816dc0-959b-4752-a798-dbd07bf0ea21 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.665853] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 589.665853] env[65503]: value = "task-4449341" [ 589.665853] env[65503]: _type = "Task" [ 589.665853] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.684369] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449341, 'name': CreateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.729999] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Preparing fetch location {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 589.730645] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Creating directory with path [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 589.730822] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ee90f6b-293d-4ed4-98f0-4a60e8bb671a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.753066] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Created directory with path [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 589.754428] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Fetch image to [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 589.754992] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Downloading image file data d68ffece-ab91-4610-b535-fa1fb25ade93 to [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk on the data store datastore2 {{(pid=65503) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 589.756963] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469236ae-a859-45e6-b9c4-f0e5f5134471 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.772682] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47385c5-7dc2-45ac-bbc5-32a6579ff589 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.784627] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94406d06-117a-4b74-807c-631404741f6f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.823366] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e2d1b4-322b-400e-a3fc-07993dfae2af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.834023] env[65503]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dfb93f9c-3d97-47bd-8077-f49430e90f9c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.933712] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Downloading image file data d68ffece-ab91-4610-b535-fa1fb25ade93 to the data store datastore2 {{(pid=65503) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 589.939974] env[65503]: DEBUG nova.compute.manager [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 590.021832] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.799s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.023695] env[65503]: DEBUG nova.compute.manager [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 590.029917] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.185s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.029984] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.030250] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 590.033623] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.707s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.033623] env[65503]: INFO nova.compute.claims [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.037930] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df49b203-b685-443b-970b-2d46a1375040 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.051108] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f881254-48be-4bdf-afdf-182281eb90b8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.082923] env[65503]: DEBUG oslo_vmware.rw_handles [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=65503) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 590.085372] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9039cc73-77bd-44aa-bd88-2f0425072f9d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.154932] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ccd676-a608-4676-8351-594432fadd54 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.198267] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180701MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 590.198414] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 590.208784] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449341, 'name': CreateVM_Task, 'duration_secs': 0.382737} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.211508] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 590.211508] env[65503]: WARNING neutronclient.v2_0.client [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 590.211736] env[65503]: WARNING openstack [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 590.211959] env[65503]: WARNING openstack [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 590.268840] env[65503]: DEBUG nova.compute.manager [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 590.304445] env[65503]: DEBUG nova.virt.hardware [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 590.304609] env[65503]: DEBUG nova.virt.hardware [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 590.304705] env[65503]: DEBUG nova.virt.hardware [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 590.305059] env[65503]: DEBUG nova.virt.hardware [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 590.305059] env[65503]: DEBUG nova.virt.hardware [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 590.305162] env[65503]: DEBUG nova.virt.hardware [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 590.305360] env[65503]: DEBUG nova.virt.hardware [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 590.305506] env[65503]: DEBUG nova.virt.hardware [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 590.305662] env[65503]: DEBUG nova.virt.hardware [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 590.305813] env[65503]: DEBUG nova.virt.hardware [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 590.305971] env[65503]: DEBUG nova.virt.hardware [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 590.306881] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0bfe2f-b5c3-486d-a151-dbf23c6657fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.316532] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7d41b3-478c-4376-8f60-f108d874218a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.468192] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 590.543637] env[65503]: DEBUG nova.compute.utils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 590.543637] env[65503]: DEBUG nova.compute.manager [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 590.543637] env[65503]: DEBUG nova.network.neutron [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 590.543637] env[65503]: WARNING neutronclient.v2_0.client [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
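Annotation: the repeated "Acquiring lock ... / Lock ... acquired ... waited N s / released ... held N s" triplets in this trace come from oslo.concurrency's lockutils helpers, which Nova uses to serialize work such as resource-tracker claims ("compute_resources") and the per-image datastore cache paths seen above. The following is a minimal, illustrative sketch of that locking pattern only; the lock names and guarded functions are placeholders and not Nova's actual code.

# Illustrative sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ... / acquired ... / released ..." DEBUG lines in this log.
# Lock names and function bodies are placeholders, not Nova code.
from oslo_concurrency import lockutils

# Decorator form: all callers sharing the lock name are serialized; the wrapper
# logs how long the caller waited for the lock and how long it was held.
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # critical section (e.g. a resource-tracker instance claim)

# Context-manager form, as used for the per-image cache entries such as
# "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk".
def cache_image(image_id):
    with lockutils.lock('[datastore2] devstack-image-cache_base/%s.vmdk' % image_id):
        pass  # fetch/copy the image while holding the named semaphore;
              # lockutils logs the Acquiring/Acquired/Releasing transitions at DEBUG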
[ 590.544219] env[65503]: WARNING neutronclient.v2_0.client [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 590.544889] env[65503]: WARNING openstack [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 590.545303] env[65503]: WARNING openstack [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 590.733396] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquiring lock "50f11559-b8c7-41a2-aa43-255a28ffa58c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 590.733612] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lock "50f11559-b8c7-41a2-aa43-255a28ffa58c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.862665] env[65503]: DEBUG oslo_vmware.rw_handles [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Completed reading data from the image iterator. {{(pid=65503) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 590.862897] env[65503]: DEBUG oslo_vmware.rw_handles [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 591.066389] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.066729] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.067196] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 591.069193] env[65503]: DEBUG nova.compute.manager [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 591.074041] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f315231b-363b-4b88-9ae3-cff8dab9efe5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.075637] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Downloaded image file data d68ffece-ab91-4610-b535-fa1fb25ade93 to vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk on the data store datastore2 {{(pid=65503) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 591.077039] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Caching image {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 591.077262] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Copying Virtual Disk [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk to [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 591.077956] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-27a930c1-50f6-4f15-8d64-e3f6c748e740 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.084850] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 591.084850] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5209af04-4734-f587-9bbf-8e04f7e8da19" [ 591.084850] env[65503]: _type = "Task" [ 591.084850] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.096826] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5209af04-4734-f587-9bbf-8e04f7e8da19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.098159] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 591.098159] env[65503]: value = "task-4449342" [ 591.098159] env[65503]: _type = "Task" [ 591.098159] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.106569] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.192529] env[65503]: DEBUG nova.policy [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '742b487a3a074556a0763b5f59563624', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '810a8d23b638450180bc37c0a952bcf4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 591.236789] env[65503]: DEBUG nova.compute.manager [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 591.251994] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013a2041-2d9b-470e-970d-49d61e1b5b71 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.260683] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d13c21-be55-4d29-905c-93a799d1fded {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.292860] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1de6ff9-7ab3-483c-87be-ffdd344f1071 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.301791] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eed162d-1c4b-4c85-b70f-547363b4a1f1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.317267] env[65503]: DEBUG nova.compute.provider_tree [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.343535] env[65503]: DEBUG nova.network.neutron [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Successfully updated port: c6e9e5bd-a5fa-4b57-9707-33c2c2106702 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 591.604215] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 591.605569] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 591.605569] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.613630] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449342, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.775658] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.821242] env[65503]: DEBUG nova.scheduler.client.report [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 591.846973] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "refresh_cache-c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.847195] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquired lock "refresh_cache-c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.847373] env[65503]: DEBUG nova.network.neutron [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 592.086527] env[65503]: DEBUG nova.compute.manager [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 592.112996] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449342, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681831} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.113306] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Copied Virtual Disk [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk to [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 592.113482] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Deleting the datastore file [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 592.113739] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3399b93-a33a-4523-9779-c49750163aee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.123684] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 592.123684] env[65503]: value = "task-4449343" [ 592.123684] env[65503]: _type = "Task" [ 592.123684] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.133444] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449343, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.148715] env[65503]: DEBUG nova.virt.hardware [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 592.148953] env[65503]: DEBUG nova.virt.hardware [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 592.149143] env[65503]: DEBUG nova.virt.hardware [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 592.149386] env[65503]: DEBUG nova.virt.hardware [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 592.149544] env[65503]: DEBUG nova.virt.hardware [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 592.149689] env[65503]: DEBUG nova.virt.hardware [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 592.149927] env[65503]: DEBUG nova.virt.hardware [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.150371] env[65503]: DEBUG nova.virt.hardware [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 592.150790] env[65503]: DEBUG 
nova.virt.hardware [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 592.150790] env[65503]: DEBUG nova.virt.hardware [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 592.151429] env[65503]: DEBUG nova.virt.hardware [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 592.151833] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84eaaf39-4bea-4e42-8b41-a5745af4b28a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.155315] env[65503]: DEBUG nova.network.neutron [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Successfully created port: 2afefd06-f1b1-4227-b8ea-28c0ccdf5b69 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 592.163438] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d02089-1b38-43ff-96b3-c1c945b36ae8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.330539] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 592.331286] env[65503]: DEBUG nova.compute.manager [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 592.333968] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.112s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.335670] env[65503]: INFO nova.compute.claims [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 592.350575] env[65503]: WARNING openstack [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 592.350979] env[65503]: WARNING openstack [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 592.634579] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449343, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026132} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.634829] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 592.635043] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Moving file from [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c/d68ffece-ab91-4610-b535-fa1fb25ade93 to [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93. 
{{(pid=65503) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 592.635329] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-aea46f41-654e-4dcb-969a-6156be8dc943 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.643503] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 592.643503] env[65503]: value = "task-4449344" [ 592.643503] env[65503]: _type = "Task" [ 592.643503] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.652333] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449344, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.825770] env[65503]: DEBUG nova.network.neutron [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 592.845516] env[65503]: DEBUG nova.compute.utils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 592.850494] env[65503]: DEBUG nova.compute.manager [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 592.850772] env[65503]: DEBUG nova.network.neutron [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 592.851483] env[65503]: WARNING neutronclient.v2_0.client [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 592.851483] env[65503]: WARNING neutronclient.v2_0.client [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
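Annotation: every vCenter operation in this trace (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task, MoveDatastoreFile_Task) follows the same oslo.vmware shape: the SOAP call returns a task reference immediately, and the API session then polls it until completion, which is what the "Waiting for the task ... progress is N% ... completed successfully" lines show. Below is a minimal, hedged sketch of that polling pattern; the vCenter endpoint, credentials, and datastore path are placeholders rather than values from this deployment.

# Illustrative sketch of the oslo.vmware task-polling pattern seen in this log.
# Endpoint, credentials and the datastore path are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',   # placeholder vCenter host and credentials
    api_retry_count=3, task_poll_interval=0.5)

# Resolve a datacenter moref so the "[datastore2] ..." path below can be interpreted.
dc_result = session.invoke_api(vim_util, 'get_objects', session.vim,
                               'Datacenter', 1, ['name'])
datacenter_ref = dc_result.objects[0].obj

# The SOAP call returns a task reference; wait_for_task() polls it, logging
# progress at DEBUG, and raises if the task finishes in an error state.
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task',
    session.vim.service_content.fileManager,
    name='[datastore2] vmware_temp/some-temp-dir',   # placeholder datastore path
    datacenter=datacenter_ref)
session.wait_for_task(task)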
[ 592.852081] env[65503]: WARNING openstack [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 592.853508] env[65503]: WARNING openstack [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 593.156922] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449344, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.156378} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.156922] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] File moved {{(pid=65503) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 593.156922] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Cleaning up location [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 593.156922] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Deleting the datastore file [datastore2] vmware_temp/f4e9ef7b-6241-42fd-89c2-907389da565c {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 593.156922] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-132ffa3e-913c-4037-9a07-4190c13bfb28 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.162045] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 593.162045] env[65503]: value = "task-4449345" [ 593.162045] env[65503]: _type = "Task" [ 593.162045] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.172125] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449345, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.359453] env[65503]: DEBUG nova.compute.manager [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 593.522823] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7b1c5a-206a-4019-8773-65c3226b255b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.531231] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40ea33b-5f3e-4513-b9e4-ee1042b00030 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.566277] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d8f3a7-d42f-4f55-b958-aae2a2050ffb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.575725] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b9544c-8a6b-4eeb-876c-a9e93468b885 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.591970] env[65503]: DEBUG nova.compute.provider_tree [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 593.682871] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449345, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036338} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.684690] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 593.687058] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eca1b9da-2041-4cdb-95a7-b063a921e530 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.696919] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 593.696919] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52adc4f3-0d76-4425-3b4a-cb0281f76e7a" [ 593.696919] env[65503]: _type = "Task" [ 593.696919] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.708729] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52adc4f3-0d76-4425-3b4a-cb0281f76e7a, 'name': SearchDatastore_Task, 'duration_secs': 0.010368} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.709107] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 593.709264] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] c6aecf44-9a23-47a2-b1aa-6530b4119b1d/c6aecf44-9a23-47a2-b1aa-6530b4119b1d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 593.709628] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 593.709710] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 593.709959] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-342bb626-bec6-478d-99ae-59f1c59b9c4a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.712871] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbd19b60-b1a3-4229-9015-5ce01c3a08ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.722734] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 593.722734] env[65503]: value = "task-4449346" [ 593.722734] env[65503]: _type = "Task" [ 593.722734] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.728361] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 593.728631] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 593.731153] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ae496a4-b908-476e-9912-b45cff578c42 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.736615] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449346, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.741678] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 593.741678] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5208436d-a97d-838e-4292-55f5c7139e9b" [ 593.741678] env[65503]: _type = "Task" [ 593.741678] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.750516] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5208436d-a97d-838e-4292-55f5c7139e9b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.104992] env[65503]: DEBUG nova.network.neutron [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Successfully updated port: 2afefd06-f1b1-4227-b8ea-28c0ccdf5b69 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 594.124191] env[65503]: ERROR nova.scheduler.client.report [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [req-cd91d7ba-c1d1-4b4c-9c68-52337ebf2e01] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cd91d7ba-c1d1-4b4c-9c68-52337ebf2e01"}]} [ 594.160461] env[65503]: DEBUG nova.scheduler.client.report [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 594.183096] env[65503]: DEBUG nova.scheduler.client.report [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 594.183328] env[65503]: DEBUG nova.compute.provider_tree [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 594.200398] env[65503]: DEBUG nova.scheduler.client.report [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e 
tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 594.220392] env[65503]: WARNING openstack [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 594.220392] env[65503]: WARNING openstack [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 594.239795] env[65503]: DEBUG nova.scheduler.client.report [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 594.249773] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449346, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.265027] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5208436d-a97d-838e-4292-55f5c7139e9b, 'name': SearchDatastore_Task, 'duration_secs': 0.018758} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.270197] env[65503]: DEBUG nova.policy [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ddb7dc8858e4aa09c61dc232cb465eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5de0ae091db74426975a523e945110fa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 594.272438] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-418feac8-2fc8-4c34-af14-7caeba5c651a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.279722] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 594.279722] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5207a1a8-3240-b975-3fbd-2c616a83a79f" [ 594.279722] env[65503]: _type = "Task" [ 594.279722] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.290266] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5207a1a8-3240-b975-3fbd-2c616a83a79f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.377935] env[65503]: DEBUG nova.compute.manager [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 594.410336] env[65503]: DEBUG nova.virt.hardware [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 594.410571] env[65503]: DEBUG nova.virt.hardware [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 594.410713] env[65503]: DEBUG nova.virt.hardware [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 594.410902] env[65503]: DEBUG nova.virt.hardware [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 594.411026] env[65503]: DEBUG nova.virt.hardware [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 594.411167] env[65503]: DEBUG nova.virt.hardware [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 594.411360] env[65503]: DEBUG nova.virt.hardware [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 594.411535] env[65503]: DEBUG nova.virt.hardware [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 594.411649] env[65503]: DEBUG nova.virt.hardware [None 
req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 594.411796] env[65503]: DEBUG nova.virt.hardware [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 594.412093] env[65503]: DEBUG nova.virt.hardware [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 594.413040] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ef1348-34b5-4e5f-8be7-1a84d373eb14 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.423169] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8652a9bf-4cd6-4859-8399-1fa627b2162c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.443947] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99411f7-54e4-4409-ae3b-038b53614450 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.451705] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d38a53e-dd7b-4cfe-a6e5-cb8afb3dc9ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.484065] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dcd8b1d-2962-4f40-b87b-f6ad32c16999 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.493227] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eceb9da4-b940-493f-9a4c-6018cceae555 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.510509] env[65503]: DEBUG nova.compute.provider_tree [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 594.609410] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquiring 
lock "refresh_cache-02b3b44e-96bb-47a0-8aa0-7026d987cad8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.609583] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquired lock "refresh_cache-02b3b44e-96bb-47a0-8aa0-7026d987cad8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 594.609756] env[65503]: DEBUG nova.network.neutron [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 594.745507] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449346, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.796521] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5207a1a8-3240-b975-3fbd-2c616a83a79f, 'name': SearchDatastore_Task, 'duration_secs': 0.020219} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.798042] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 594.798042] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 39266117-e82e-48ae-932a-be04b1a7351a/39266117-e82e-48ae-932a-be04b1a7351a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 594.798042] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14586d60-aff2-455c-a27a-79b61054871c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.806145] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 594.806145] env[65503]: value = "task-4449347" [ 594.806145] env[65503]: _type = "Task" [ 594.806145] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.815771] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449347, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.944650] env[65503]: DEBUG nova.network.neutron [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Successfully created port: 6164c166-4054-4e4a-93fb-6e84abe74f7d {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 595.053219] env[65503]: DEBUG nova.scheduler.client.report [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 13 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 595.053500] env[65503]: DEBUG nova.compute.provider_tree [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 13 to 14 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 595.053672] env[65503]: DEBUG nova.compute.provider_tree [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 595.112838] env[65503]: WARNING openstack [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 595.113310] env[65503]: WARNING openstack [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Disabling service 'key-manager': Encountered an exception 
attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 595.130765] env[65503]: WARNING neutronclient.v2_0.client [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 595.131523] env[65503]: WARNING openstack [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 595.132724] env[65503]: WARNING openstack [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 595.231351] env[65503]: DEBUG nova.network.neutron [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 595.247305] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449346, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.499358} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.247620] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] c6aecf44-9a23-47a2-b1aa-6530b4119b1d/c6aecf44-9a23-47a2-b1aa-6530b4119b1d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 595.247965] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 595.248179] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d6fae78-4d18-4740-948e-6d5e8f26e755 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.256975] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 595.256975] env[65503]: value = "task-4449348" [ 595.256975] env[65503]: _type = "Task" [ 595.256975] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.269440] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449348, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.318733] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449347, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.491475] env[65503]: WARNING openstack [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 595.491846] env[65503]: WARNING openstack [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 595.536218] env[65503]: DEBUG nova.network.neutron [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Updating instance_info_cache with network_info: [{"id": "c6e9e5bd-a5fa-4b57-9707-33c2c2106702", "address": "fa:16:3e:c5:e0:04", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e9e5bd-a5", "ovs_interfaceid": "c6e9e5bd-a5fa-4b57-9707-33c2c2106702", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 595.561393] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.227s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.561986] env[65503]: DEBUG nova.compute.manager [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 595.566924] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.411s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.569040] env[65503]: INFO nova.compute.claims [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.771363] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449348, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07307} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.771631] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 595.772591] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82eb2d35-4fe4-4ba7-b015-f460c8378511 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.796944] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] c6aecf44-9a23-47a2-b1aa-6530b4119b1d/c6aecf44-9a23-47a2-b1aa-6530b4119b1d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 595.797887] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9ac6a2d-ed28-4d90-946e-5233c50f7176 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.822806] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449347, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.879889} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.824766] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 39266117-e82e-48ae-932a-be04b1a7351a/39266117-e82e-48ae-932a-be04b1a7351a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 595.824973] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 595.825287] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 595.825287] env[65503]: value = "task-4449349" [ 595.825287] env[65503]: _type = "Task" [ 595.825287] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.825838] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a18763a-2dd2-4c88-bdd9-13d44c2cd350 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.838655] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449349, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.839834] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 595.839834] env[65503]: value = "task-4449350" [ 595.839834] env[65503]: _type = "Task" [ 595.839834] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.940434] env[65503]: WARNING neutronclient.v2_0.client [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 595.941260] env[65503]: WARNING openstack [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 595.941776] env[65503]: WARNING openstack [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 596.042275] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Releasing lock "refresh_cache-c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.042660] env[65503]: DEBUG nova.compute.manager [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Instance network_info: |[{"id": "c6e9e5bd-a5fa-4b57-9707-33c2c2106702", "address": "fa:16:3e:c5:e0:04", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e9e5bd-a5", "ovs_interfaceid": "c6e9e5bd-a5fa-4b57-9707-33c2c2106702", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 596.043192] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:e0:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6e9e5bd-a5fa-4b57-9707-33c2c2106702', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 596.053650] env[65503]: DEBUG nova.virt.vmwareapi.vm_util 
[None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Creating folder: Project (c0f1a7c2c68a498c8ffa173e9778d59d). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.054047] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1def5a7-53c1-4c9e-89ea-d27166a5d3dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.069027] env[65503]: DEBUG nova.compute.utils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 596.071664] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Created folder: Project (c0f1a7c2c68a498c8ffa173e9778d59d) in parent group-v870190. [ 596.071664] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Creating folder: Instances. Parent ref: group-v870197. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.071939] env[65503]: DEBUG nova.compute.manager [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 596.072174] env[65503]: DEBUG nova.network.neutron [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 596.072691] env[65503]: WARNING neutronclient.v2_0.client [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 596.073020] env[65503]: WARNING neutronclient.v2_0.client [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 596.073879] env[65503]: WARNING openstack [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 596.074377] env[65503]: WARNING openstack [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 596.082010] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-63008c40-820a-48c4-93a8-1ed6de90e1cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.097102] env[65503]: DEBUG nova.network.neutron [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Updating instance_info_cache with network_info: [{"id": "2afefd06-f1b1-4227-b8ea-28c0ccdf5b69", "address": "fa:16:3e:3d:66:35", "network": {"id": "63d8b798-8291-48f6-9bf6-ae2496aba792", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-193093434-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "810a8d23b638450180bc37c0a952bcf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2afefd06-f1", "ovs_interfaceid": "2afefd06-f1b1-4227-b8ea-28c0ccdf5b69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 596.101461] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Created folder: Instances in parent group-v870197. [ 596.101676] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 596.101880] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 596.102106] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d8d9721-8a97-4843-bf1a-797675419ebf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.125829] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 596.125829] env[65503]: value = "task-4449353" [ 596.125829] env[65503]: _type = "Task" [ 596.125829] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.135670] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449353, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.192293] env[65503]: DEBUG nova.policy [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3646d1b47ef2405f8b3c82f51556b4d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c519075bc624e7b90915354752765da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 596.340196] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449349, 'name': ReconfigVM_Task, 'duration_secs': 0.360837} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.340196] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Reconfigured VM instance instance-00000002 to attach disk [datastore2] c6aecf44-9a23-47a2-b1aa-6530b4119b1d/c6aecf44-9a23-47a2-b1aa-6530b4119b1d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 596.340196] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de0f9f9b-911e-4cec-92cf-a423444dc059 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.350570] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449350, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081378} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.352251] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 596.352581] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 596.352581] env[65503]: value = "task-4449354" [ 596.352581] env[65503]: _type = "Task" [ 596.352581] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.353315] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202f101c-26f7-43f0-8cc7-c8824f880806 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.367373] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449354, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.393393] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 39266117-e82e-48ae-932a-be04b1a7351a/39266117-e82e-48ae-932a-be04b1a7351a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 596.394315] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9572c79e-e32c-45f9-9fcd-820a376c851c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.418282] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 596.418282] env[65503]: value = "task-4449355" [ 596.418282] env[65503]: _type = "Task" [ 596.418282] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.430519] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449355, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.572531] env[65503]: DEBUG nova.compute.manager [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 596.586726] env[65503]: DEBUG nova.network.neutron [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Successfully created port: eb97a8c5-41a9-42ff-80fe-382fbcdc440a {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 596.601256] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Releasing lock "refresh_cache-02b3b44e-96bb-47a0-8aa0-7026d987cad8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.601950] env[65503]: DEBUG nova.compute.manager [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Instance network_info: |[{"id": "2afefd06-f1b1-4227-b8ea-28c0ccdf5b69", "address": "fa:16:3e:3d:66:35", "network": {"id": "63d8b798-8291-48f6-9bf6-ae2496aba792", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-193093434-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "810a8d23b638450180bc37c0a952bcf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2afefd06-f1", "ovs_interfaceid": "2afefd06-f1b1-4227-b8ea-28c0ccdf5b69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 596.602477] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:66:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2afefd06-f1b1-4227-b8ea-28c0ccdf5b69', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 596.611400] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Creating folder: Project (810a8d23b638450180bc37c0a952bcf4). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.611683] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ceb021b0-5ff5-4df0-8102-9a71ce931b81 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.626456] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Created folder: Project (810a8d23b638450180bc37c0a952bcf4) in parent group-v870190. [ 596.626456] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Creating folder: Instances. Parent ref: group-v870200. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.629470] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6e93dcc-873b-451e-883f-edc404fe6785 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.642774] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449353, 'name': CreateVM_Task, 'duration_secs': 0.378832} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.643542] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 596.643542] env[65503]: WARNING neutronclient.v2_0.client [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
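The Folder.CreateFolder and Folder.CreateVM_Task entries above follow the same shape as every other vCenter call in this log: the SOAP invocation returns a task reference, and oslo.vmware's wait_for_task/_poll_task loop polls it until it reports success or error (the "progress is N%" lines, then "completed successfully"). A minimal sketch of that poll-until-terminal pattern; TaskInfo and the poll callable are illustrative stand-ins, not the real oslo_vmware objects.

```python
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    """Illustrative stand-in for the task info vCenter returns on each poll."""
    key: str
    state: str              # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    result: object = None
    error: str = ""


def wait_for_task(poll_info, poll_interval=0.5):
    """Poll a task until it reaches a terminal state, mirroring the
    wait_for_task/_poll_task entries in the log."""
    while True:
        info = poll_info()                      # one _poll_task round trip
        if info.state == 'success':
            return info.result                  # e.g. the created folder/VM ref
        if info.state == 'error':
            raise RuntimeError(f"{info.key} failed: {info.error}")
        print(f"Task {info.key} progress is {info.progress}%")
        time.sleep(poll_interval)


# Fake poller that "finishes" on the second poll (result value is made up).
states = iter([
    TaskInfo('task-4449353', 'running', progress=0),
    TaskInfo('task-4449353', 'success', progress=100, result='vm-870201'),
])
print(wait_for_task(lambda: next(states), poll_interval=0))
```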
[ 596.643960] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.644117] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.644467] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 596.646000] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-426d7129-482a-4db0-b96f-b28a40897819 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.648088] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Created folder: Instances in parent group-v870200. [ 596.648088] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 596.648333] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 596.649135] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b49bd0c2-0ab8-48cd-842a-f387a6e47157 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.669322] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 596.669322] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524b0210-8db1-ccd8-8670-084476988f95" [ 596.669322] env[65503]: _type = "Task" [ 596.669322] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.679146] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 596.679146] env[65503]: value = "task-4449358" [ 596.679146] env[65503]: _type = "Task" [ 596.679146] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.683719] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524b0210-8db1-ccd8-8670-084476988f95, 'name': SearchDatastore_Task, 'duration_secs': 0.027627} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.690301] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.690301] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 596.690301] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.690301] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.690503] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 596.691493] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43021e92-9502-47e2-929c-9ce1673cbedc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.694473] env[65503]: DEBUG nova.network.neutron [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Successfully updated port: 6164c166-4054-4e4a-93fb-6e84abe74f7d {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 596.702480] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449358, 'name': CreateVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.704035] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 596.704299] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 596.705162] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-425a8b78-869f-43c2-8ab9-b215df8f2730 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.715572] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 596.715572] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bef8b5-c437-d34a-0b4f-9198b1881773" [ 596.715572] env[65503]: _type = "Task" [ 596.715572] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.726404] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bef8b5-c437-d34a-0b4f-9198b1881773, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.824511] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1daae2-ced2-4cc4-83c1-b82f53c92166 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.835318] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e4d1d3-f31b-4a79-88fa-fa049e0f00fa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.871275] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f958da3a-e68a-4865-bebb-5678d339912a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.880094] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449354, 'name': Rename_Task, 'duration_secs': 0.192768} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.882602] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 596.883028] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9bd5369-9289-4ec1-b79f-f68f1cdbf8d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.886078] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c894ccc7-365e-4b00-8c07-2102d69941e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.902518] env[65503]: DEBUG nova.compute.provider_tree [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.905399] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 596.905399] env[65503]: value = "task-4449359" [ 596.905399] env[65503]: _type = "Task" [ 596.905399] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.915310] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449359, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.930354] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449355, 'name': ReconfigVM_Task, 'duration_secs': 0.349818} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.930708] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 39266117-e82e-48ae-932a-be04b1a7351a/39266117-e82e-48ae-932a-be04b1a7351a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 596.931361] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a3923e8-4ec1-49f8-acad-e794ce197b90 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.940699] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 596.940699] env[65503]: value = "task-4449360" [ 596.940699] env[65503]: _type = "Task" [ 596.940699] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.950437] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449360, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.194812] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449358, 'name': CreateVM_Task, 'duration_secs': 0.339707} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.194992] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 597.195758] env[65503]: WARNING neutronclient.v2_0.client [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
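The Acquiring/Acquired/Releasing lock entries around "[datastore2] devstack-image-cache_base/<image id>" (above, and again just below) come from oslo.concurrency's lockutils, which serializes concurrent spawns against the shared image cache. A minimal sketch of that guard, assuming lockutils.lock with external=True and a configured oslo_concurrency lock_path; the helper named in the usage comment is hypothetical.

```python
from oslo_concurrency import lockutils


def process_cached_image(datastore, image_id, do_work):
    """Run do_work() while holding the per-image cache lock."""
    # Lock name mirrors the log: "[datastore2] devstack-image-cache_base/<image-id>"
    lock_name = f"[{datastore}] devstack-image-cache_base/{image_id}"
    # external=True layers a cross-process file lock on the in-process
    # semaphore (the "Acquired external semaphore" line); it requires
    # oslo_concurrency's lock_path to be configured.
    with lockutils.lock(lock_name, external=True):
        return do_work()


# e.g. process_cached_image("datastore2",
#                           "d68ffece-ab91-4610-b535-fa1fb25ade93",
#                           lambda: search_datastore_for_cached_vmdk())  # hypothetical helper
```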
[ 597.196210] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.196368] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.196970] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 597.197646] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "refresh_cache-ad85eef0-cef7-4900-b193-1737a6c2f17b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.197852] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "refresh_cache-ad85eef0-cef7-4900-b193-1737a6c2f17b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.198077] env[65503]: DEBUG nova.network.neutron [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 597.199242] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c209a2ca-7957-44bc-a659-53970c75e4f8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.208028] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for the task: (returnval){ [ 597.208028] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f983c0-6fef-4b04-a5c3-ce70fbbf8efd" [ 597.208028] env[65503]: _type = "Task" [ 597.208028] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.219557] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f983c0-6fef-4b04-a5c3-ce70fbbf8efd, 'name': SearchDatastore_Task, 'duration_secs': 0.009568} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.222942] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.223201] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 597.223410] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.231280] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bef8b5-c437-d34a-0b4f-9198b1881773, 'name': SearchDatastore_Task, 'duration_secs': 0.019376} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.232358] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cee63b3-e7c6-4493-ba5c-825dcdc1d503 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.238477] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 597.238477] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b98034-25ad-d11f-dd01-2f14bb5b70fd" [ 597.238477] env[65503]: _type = "Task" [ 597.238477] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.247798] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b98034-25ad-d11f-dd01-2f14bb5b70fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.254178] env[65503]: DEBUG nova.compute.manager [req-594d8fc5-b8ec-4ad4-9b9f-8d7c53c884ae req-9a08985d-288c-443a-a4ec-b9bca2fecfd3 service nova] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Received event network-vif-plugged-c6e9e5bd-a5fa-4b57-9707-33c2c2106702 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 597.254178] env[65503]: DEBUG oslo_concurrency.lockutils [req-594d8fc5-b8ec-4ad4-9b9f-8d7c53c884ae req-9a08985d-288c-443a-a4ec-b9bca2fecfd3 service nova] Acquiring lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.254178] env[65503]: DEBUG oslo_concurrency.lockutils [req-594d8fc5-b8ec-4ad4-9b9f-8d7c53c884ae req-9a08985d-288c-443a-a4ec-b9bca2fecfd3 service nova] Lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 597.254449] env[65503]: DEBUG oslo_concurrency.lockutils [req-594d8fc5-b8ec-4ad4-9b9f-8d7c53c884ae req-9a08985d-288c-443a-a4ec-b9bca2fecfd3 service nova] Lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 597.254488] env[65503]: DEBUG nova.compute.manager [req-594d8fc5-b8ec-4ad4-9b9f-8d7c53c884ae req-9a08985d-288c-443a-a4ec-b9bca2fecfd3 service nova] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] No waiting events found dispatching network-vif-plugged-c6e9e5bd-a5fa-4b57-9707-33c2c2106702 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 597.254682] env[65503]: WARNING nova.compute.manager [req-594d8fc5-b8ec-4ad4-9b9f-8d7c53c884ae req-9a08985d-288c-443a-a4ec-b9bca2fecfd3 service nova] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Received unexpected event network-vif-plugged-c6e9e5bd-a5fa-4b57-9707-33c2c2106702 for instance with vm_state building and task_state spawning. [ 597.282055] env[65503]: DEBUG nova.compute.manager [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Received event network-changed-03697e78-5935-45aa-a1d5-1bf8701e3f56 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 597.282194] env[65503]: DEBUG nova.compute.manager [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Refreshing instance network info cache due to event network-changed-03697e78-5935-45aa-a1d5-1bf8701e3f56. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 597.282409] env[65503]: DEBUG oslo_concurrency.lockutils [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] Acquiring lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.282549] env[65503]: DEBUG oslo_concurrency.lockutils [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] Acquired lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.282702] env[65503]: DEBUG nova.network.neutron [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Refreshing network info cache for port 03697e78-5935-45aa-a1d5-1bf8701e3f56 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 597.409941] env[65503]: DEBUG nova.scheduler.client.report [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 597.421465] env[65503]: DEBUG oslo_vmware.api [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449359, 'name': PowerOnVM_Task, 'duration_secs': 0.478277} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.421738] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 597.422501] env[65503]: INFO nova.compute.manager [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Took 9.46 seconds to spawn the instance on the hypervisor. 
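The network-vif-plugged handling above is Nova's external instance event mechanism: Neutron notifies the compute manager when the port comes up, and a spawn that registered for that event is woken; if nothing is registered, the event is logged as unexpected and dropped, which is what the WARNING shows for c703d7aa while it is still building/spawning. A rough sketch of that prepare/dispatch pattern using plain threading primitives, not Nova's actual InstanceEvents implementation.

```python
import threading
from collections import defaultdict


class InstanceEvents:
    """Map of expected (instance, event name) -> threading.Event, loosely
    modelled on the prepare/pop/dispatch flow visible in the log."""

    def __init__(self):
        self._lock = threading.Lock()        # plays the "<uuid>-events" lock role
        self._events = defaultdict(dict)     # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid, name):
        """Called by the spawning thread before it starts waiting."""
        with self._lock:
            return self._events[instance_uuid].setdefault(name, threading.Event())

    def dispatch(self, instance_uuid, name):
        """Called when the external service reports the event."""
        with self._lock:
            waiter = self._events[instance_uuid].pop(name, None)
        if waiter is None:
            # Nothing registered: same situation as the WARNING about an
            # unexpected network-vif-plugged event during build/spawn.
            print(f"No waiting events found dispatching {name}")
        else:
            waiter.set()


events = InstanceEvents()
ev = "network-vif-plugged-c6e9e5bd-a5fa-4b57-9707-33c2c2106702"
uuid = "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3"
events.dispatch(uuid, ev)            # dropped: nobody is waiting yet
waiter = events.prepare(uuid, ev)
events.dispatch(uuid, ev)            # wakes the registered waiter
assert waiter.wait(timeout=1)
```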
[ 597.422873] env[65503]: DEBUG nova.compute.manager [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 597.423704] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3dccd9-c9b0-4d9b-8aca-a3451c1fe0b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.451327] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449360, 'name': Rename_Task, 'duration_secs': 0.147504} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.451595] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 597.451831] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ce6e1ee-79e4-4a38-87b5-4eaa1bbbe42b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.459326] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 597.459326] env[65503]: value = "task-4449361" [ 597.459326] env[65503]: _type = "Task" [ 597.459326] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.468574] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449361, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.583835] env[65503]: DEBUG nova.compute.manager [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 597.610775] env[65503]: DEBUG nova.virt.hardware [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 597.611039] env[65503]: DEBUG nova.virt.hardware [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 597.611220] env[65503]: DEBUG nova.virt.hardware [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 597.611405] env[65503]: DEBUG nova.virt.hardware [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 597.611540] env[65503]: DEBUG nova.virt.hardware [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 597.611678] env[65503]: DEBUG nova.virt.hardware [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 597.611875] env[65503]: DEBUG nova.virt.hardware [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 597.612050] env[65503]: DEBUG nova.virt.hardware [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 597.612267] env[65503]: DEBUG nova.virt.hardware [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 597.612385] env[65503]: DEBUG nova.virt.hardware [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 597.612547] env[65503]: DEBUG nova.virt.hardware [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 597.614423] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae3ffab-219d-4160-84c5-5db53ff2dcaa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.622972] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c55664-5a39-4091-b600-3da7c2538bfb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.704183] env[65503]: WARNING openstack [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 597.704577] env[65503]: WARNING openstack [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 597.754483] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b98034-25ad-d11f-dd01-2f14bb5b70fd, 'name': SearchDatastore_Task, 'duration_secs': 0.009365} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.756392] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.756392] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3/c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 597.756392] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.756392] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 597.756687] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d0744d7-caab-49b0-95d9-4954ffad0232 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.759356] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17a48ab1-6b22-4e38-8c9e-642085e36ff1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.771928] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquiring lock "db942a2d-671b-4036-a80b-d2375145cd29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.772218] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Lock "db942a2d-671b-4036-a80b-d2375145cd29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 597.773354] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 
tempest-ImagesNegativeTestJSON-1486437842-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 597.774147] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 597.774296] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 597.774296] env[65503]: value = "task-4449362" [ 597.774296] env[65503]: _type = "Task" [ 597.774296] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.775101] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ee46281-2c65-43c6-8911-068ee9f1618a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.782436] env[65503]: DEBUG nova.network.neutron [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 597.786115] env[65503]: WARNING neutronclient.v2_0.client [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 597.786842] env[65503]: WARNING openstack [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 597.789773] env[65503]: WARNING openstack [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 597.802847] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for the task: (returnval){ [ 597.802847] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52639fde-3d9d-a693-1308-29de0e965f6c" [ 597.802847] env[65503]: _type = "Task" [ 597.802847] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.810600] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449362, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.821802] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52639fde-3d9d-a693-1308-29de0e965f6c, 'name': SearchDatastore_Task, 'duration_secs': 0.012605} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.821802] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2adcdb9f-9ca7-41d2-bdb2-f450ea525ec8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.828746] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for the task: (returnval){ [ 597.828746] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520f6069-5354-c697-3c8c-dca522824e2a" [ 597.828746] env[65503]: _type = "Task" [ 597.828746] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.841085] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520f6069-5354-c697-3c8c-dca522824e2a, 'name': SearchDatastore_Task, 'duration_secs': 0.009963} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.841085] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.841085] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 02b3b44e-96bb-47a0-8aa0-7026d987cad8/02b3b44e-96bb-47a0-8aa0-7026d987cad8.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 597.841346] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bdab3743-c411-4720-bf20-50e22f02b530 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.849621] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for the task: (returnval){ [ 597.849621] env[65503]: value = "task-4449363" [ 597.849621] env[65503]: _type = "Task" [ 597.849621] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.861119] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449363, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.917535] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.351s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 597.918211] env[65503]: DEBUG nova.compute.manager [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 597.922932] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.724s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 597.943071] env[65503]: WARNING openstack [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 597.943071] env[65503]: WARNING openstack [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 597.957876] env[65503]: INFO nova.compute.manager [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Took 14.96 seconds to build instance. [ 597.971521] env[65503]: DEBUG oslo_vmware.api [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449361, 'name': PowerOnVM_Task, 'duration_secs': 0.456098} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.971521] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 597.971784] env[65503]: INFO nova.compute.manager [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Took 12.24 seconds to spawn the instance on the hypervisor. 
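Annotation: the records above and below follow oslo.vmware's submit-then-poll pattern: each privileged vCenter call (SearchDatastore_Task, CopyVirtualDisk_Task, PowerOnVM_Task, ...) returns a task reference, and wait_for_task / _poll_task repeatedly query it, logging "progress is N%" until it "completed successfully". The sketch below is only a generic illustration of that control flow under assumed names (fetch_task_info, TaskTimeout, POLL_INTERVAL are hypothetical); it is not oslo.vmware's implementation.

```python
import time

POLL_INTERVAL = 0.5   # seconds between polls (oslo.vmware drives this with a looping call)
TIMEOUT = 300         # give up after this many seconds


class TaskTimeout(Exception):
    """Raised when a task does not finish within TIMEOUT."""


def wait_for_task(fetch_task_info, task_id):
    """Poll a vCenter-style task until it reports success or error.

    fetch_task_info(task_id) is assumed to return a dict such as
    {'state': 'running', 'progress': 51} or {'state': 'success'}.
    """
    deadline = time.monotonic() + TIMEOUT
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        if info['state'] == 'success':
            return info                       # the "completed successfully" case
        if info['state'] == 'error':
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        # Mirrors the "progress is N%" DEBUG lines emitted by _poll_task.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(POLL_INTERVAL)
    raise TaskTimeout(task_id)
```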
[ 597.971863] env[65503]: DEBUG nova.compute.manager [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 597.972759] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69677e92-ee48-40b2-8878-5cc0baa6bbd2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.140777] env[65503]: WARNING openstack [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 598.141182] env[65503]: WARNING openstack [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 598.173728] env[65503]: WARNING neutronclient.v2_0.client [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 598.174419] env[65503]: WARNING openstack [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 598.174757] env[65503]: WARNING openstack [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 598.275750] env[65503]: DEBUG nova.compute.manager [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 598.293856] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449362, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.361961] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449363, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.398732] env[65503]: DEBUG nova.network.neutron [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Successfully updated port: eb97a8c5-41a9-42ff-80fe-382fbcdc440a {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 598.434218] env[65503]: DEBUG nova.compute.utils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 598.443672] env[65503]: DEBUG nova.compute.manager [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 598.444773] env[65503]: DEBUG nova.network.neutron [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 598.445564] env[65503]: WARNING neutronclient.v2_0.client [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 598.448479] env[65503]: WARNING neutronclient.v2_0.client [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
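Annotation: the repeated "Disabling service 'block-storage' / 'key-manager'" warnings above stem from oslo.config's NoSuchOptError: openstacksdk tries to read valid_interfaces from the [cinder] and [barbican] groups of the Nova config, but no such option is registered there, so the SDK skips those services. The snippet below is only a tiny stand-alone reproduction of that error class (it assumes oslo.config is installed); it does not show how openstacksdk actually processes the config.

```python
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))  # group exists, option does not
conf(args=[])                                # parse an empty command line

try:
    conf.cinder.valid_interfaces             # never registered in this group
except cfg.NoSuchOptError as exc:
    print(exc)  # -> no such option valid_interfaces in group [cinder]
```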
[ 598.449163] env[65503]: WARNING openstack [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 598.449531] env[65503]: WARNING openstack [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 598.460209] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6667c40e-bad2-4074-8e2e-e752bd6f6ab3 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lock "c6aecf44-9a23-47a2-b1aa-6530b4119b1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.472s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.502593] env[65503]: INFO nova.compute.manager [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Took 17.01 seconds to build instance. [ 598.579174] env[65503]: WARNING neutronclient.v2_0.client [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
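Annotation: the "Lock ... acquired ... waited N s" / "released ... held N s" records come from oslo.concurrency's lockutils (the file path lockutils.py appears in each record): callers such as ResourceTracker.instance_claim and _update_available_resource serialize on the named "compute_resources" lock, and the wrapper logs how long each caller waited and held it. A minimal stand-alone sketch of that mechanism, assuming oslo.concurrency is installed; nova reaches lockutils through its own wrappers, so this is illustrative only.

```python
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Everything in here runs with the "compute_resources" lock held, which
    # is why a long claim shows up as "held N.NNNs" while a concurrent
    # periodic task reports the time it spent waiting for the same lock.
    print(f"claiming resources for {instance_uuid}")


instance_claim('2dbc3860-c65c-4cbb-8d90-f1f74420e652')
```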
[ 598.579979] env[65503]: WARNING openstack [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 598.580625] env[65503]: WARNING openstack [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 598.633555] env[65503]: DEBUG nova.network.neutron [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Updating instance_info_cache with network_info: [{"id": "6164c166-4054-4e4a-93fb-6e84abe74f7d", "address": "fa:16:3e:1e:c2:35", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6164c166-40", "ovs_interfaceid": "6164c166-4054-4e4a-93fb-6e84abe74f7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 598.795049] env[65503]: DEBUG nova.policy [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55a1af9c206548fd916052b6c260055c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b98fe43dcbf04e7d9ad4ba82b5b64f73', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 598.805825] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449362, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632714} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.807277] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3/c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 598.807277] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 598.807277] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5afb979e-4f27-432a-8373-06e009120414 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.815282] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.817444] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 598.817444] env[65503]: value = "task-4449364" [ 598.817444] env[65503]: _type = "Task" [ 598.817444] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.830136] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449364, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.841588] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "eec6a484-ab00-402e-a369-c3009065c553" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.841971] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "eec6a484-ab00-402e-a369-c3009065c553" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.863354] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449363, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.882938} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.863621] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 02b3b44e-96bb-47a0-8aa0-7026d987cad8/02b3b44e-96bb-47a0-8aa0-7026d987cad8.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 598.863828] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 598.864102] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75506b27-9ff1-4e18-88f0-adf0219e3b0f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.870394] env[65503]: DEBUG nova.network.neutron [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updated VIF entry in instance network info cache for port 03697e78-5935-45aa-a1d5-1bf8701e3f56. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 598.870726] env[65503]: DEBUG nova.network.neutron [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance_info_cache with network_info: [{"id": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "address": "fa:16:3e:34:a3:37", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03697e78-59", "ovs_interfaceid": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 598.873696] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for the task: (returnval){ [ 598.873696] env[65503]: value = "task-4449365" [ 598.873696] env[65503]: _type = "Task" [ 598.873696] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.888382] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449365, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.903240] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquiring lock "refresh_cache-f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.903485] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquired lock "refresh_cache-f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.903694] env[65503]: DEBUG nova.network.neutron [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 598.942798] env[65503]: DEBUG nova.compute.manager [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 598.997965] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 39266117-e82e-48ae-932a-be04b1a7351a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 598.998159] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance c6aecf44-9a23-47a2-b1aa-6530b4119b1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 598.998435] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 598.998435] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 02b3b44e-96bb-47a0-8aa0-7026d987cad8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 598.998548] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance ad85eef0-cef7-4900-b193-1737a6c2f17b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 598.998599] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 598.998699] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 2dbc3860-c65c-4cbb-8d90-f1f74420e652 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 599.004514] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8ce4dab-85e3-4e65-b76a-92644a1d066a tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "39266117-e82e-48ae-932a-be04b1a7351a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.527s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.136335] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "refresh_cache-ad85eef0-cef7-4900-b193-1737a6c2f17b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.136723] env[65503]: DEBUG nova.compute.manager [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Instance network_info: |[{"id": "6164c166-4054-4e4a-93fb-6e84abe74f7d", "address": "fa:16:3e:1e:c2:35", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6164c166-40", "ovs_interfaceid": "6164c166-4054-4e4a-93fb-6e84abe74f7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 599.137242] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:c2:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaba65c3-6925-4c7f-83b6-17cd1a328e27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6164c166-4054-4e4a-93fb-6e84abe74f7d', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 599.146713] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Creating folder: Project (5de0ae091db74426975a523e945110fa). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 599.148049] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-467ee79a-440b-4dd9-9f11-8442bcb99ac8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.161037] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Created folder: Project (5de0ae091db74426975a523e945110fa) in parent group-v870190. [ 599.161037] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Creating folder: Instances. Parent ref: group-v870203. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 599.161037] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfa4fe98-43f5-4117-b49b-b8ca391bf224 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.173059] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Created folder: Instances in parent group-v870203. [ 599.173059] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 599.173059] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 599.173059] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3101e2cb-ee1f-41fb-9347-434016215bf4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.194405] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 599.194405] env[65503]: value = "task-4449368" [ 599.194405] env[65503]: _type = "Task" [ 599.194405] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.204846] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449368, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.326309] env[65503]: DEBUG nova.network.neutron [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Successfully created port: b5328355-0841-4441-9689-a82ea7088346 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 599.337270] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449364, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072092} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.338111] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 599.338939] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d803fa1-e3e6-420a-9954-198eb6399134 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.349021] env[65503]: DEBUG nova.compute.manager [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 599.367336] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3/c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 599.371902] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e1e8a4e-c6de-4bfd-b739-7959570771c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.393153] env[65503]: DEBUG oslo_concurrency.lockutils [req-324e9199-78d7-4683-904b-9325175904b3 req-0e3644fd-76b3-40b4-82fb-6755821ddc57 service nova] Releasing lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.403954] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449365, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.13692} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.406553] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 599.406763] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 599.406763] env[65503]: value = "task-4449369" [ 599.406763] env[65503]: _type = "Task" [ 599.406763] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.408024] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf69e95-a82d-4334-95ee-bd3e0cd1ac26 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.411940] env[65503]: WARNING openstack [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 599.412364] env[65503]: WARNING openstack [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 599.433570] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449369, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.454041] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 02b3b44e-96bb-47a0-8aa0-7026d987cad8/02b3b44e-96bb-47a0-8aa0-7026d987cad8.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 599.457530] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf3d068a-5f57-4daa-a21a-548ebe22a4d9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.481353] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for the task: (returnval){ [ 599.481353] env[65503]: value = "task-4449370" [ 599.481353] env[65503]: _type = "Task" [ 599.481353] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.491411] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449370, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.509719] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 1e92795e-cf30-4175-9e31-c29278f3e9e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 599.640312] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquiring lock "5e2cf383-312b-404f-acff-2ecb75678600" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.640578] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lock "5e2cf383-312b-404f-acff-2ecb75678600" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.653119] env[65503]: DEBUG nova.network.neutron [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 599.709011] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449368, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.811766] env[65503]: WARNING openstack [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 599.812232] env[65503]: WARNING openstack [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 599.897042] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.931018] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449369, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.976673] env[65503]: DEBUG nova.compute.manager [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 599.995805] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449370, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.009053] env[65503]: DEBUG nova.virt.hardware [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 600.009435] env[65503]: DEBUG nova.virt.hardware [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 600.009435] env[65503]: DEBUG nova.virt.hardware [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 600.009623] env[65503]: DEBUG nova.virt.hardware [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 600.009830] env[65503]: DEBUG nova.virt.hardware [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 600.009916] env[65503]: DEBUG nova.virt.hardware [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 600.010101] env[65503]: DEBUG nova.virt.hardware [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.010783] env[65503]: DEBUG nova.virt.hardware [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 600.010783] env[65503]: DEBUG nova.virt.hardware [None 
req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 600.010783] env[65503]: DEBUG nova.virt.hardware [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 600.010783] env[65503]: DEBUG nova.virt.hardware [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 600.011554] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 50f11559-b8c7-41a2-aa43-255a28ffa58c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 600.013649] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d23d06-e7a9-42af-ae06-5fe28da24ed8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.024324] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f672d430-d499-467c-91a7-9f5e2f375d6e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.143016] env[65503]: DEBUG nova.compute.manager [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 600.208613] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449368, 'name': CreateVM_Task, 'duration_secs': 0.691786} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.208848] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 600.209378] env[65503]: WARNING neutronclient.v2_0.client [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
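Annotation: the nova.virt.hardware records above trace the CPU-topology search for the m1.nano flavor: 1 vCPU with per-dimension limits of 65536 sockets/cores/threads yields exactly one topology, 1:1:1. The sketch below is a simplified illustration of that enumeration (factor the vCPU count into sockets x cores x threads under the limits); it is not nova's _get_possible_cpu_topologies.

```python
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Return (sockets, cores, threads) triples whose product equals vcpus."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies


# For the flavor in the log (1 vCPU, limits 65536:65536:65536) the only
# result is (1, 1, 1), matching "Got 1 possible topologies".
print(possible_topologies(1, 65536, 65536, 65536))
```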
[ 600.209805] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.209886] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.210249] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 600.210512] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5327e601-1c6b-48fa-afbe-0a0b1ed0715b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.216225] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 600.216225] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a5bfa6-29bf-867c-e237-cb6d50c4e7f6" [ 600.216225] env[65503]: _type = "Task" [ 600.216225] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.227431] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a5bfa6-29bf-867c-e237-cb6d50c4e7f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.429382] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449369, 'name': ReconfigVM_Task, 'duration_secs': 0.620928} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.429653] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Reconfigured VM instance instance-00000003 to attach disk [datastore2] c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3/c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 600.431615] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29ebf8d6-9601-4b83-b0a8-bbe61e375f26 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.439466] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 600.439466] env[65503]: value = "task-4449371" [ 600.439466] env[65503]: _type = "Task" [ 600.439466] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.452763] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449371, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.494132] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449370, 'name': ReconfigVM_Task, 'duration_secs': 0.556282} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.494696] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 02b3b44e-96bb-47a0-8aa0-7026d987cad8/02b3b44e-96bb-47a0-8aa0-7026d987cad8.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 600.495542] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51f914e3-0f56-4b52-8954-63b6e7e95a33 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.504456] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for the task: (returnval){ [ 600.504456] env[65503]: value = "task-4449372" [ 600.504456] env[65503]: _type = "Task" [ 600.504456] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.515313] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449372, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.518208] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance db942a2d-671b-4036-a80b-d2375145cd29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 600.673169] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.727618] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a5bfa6-29bf-867c-e237-cb6d50c4e7f6, 'name': SearchDatastore_Task, 'duration_secs': 0.028467} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.727958] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.728198] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.728521] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.728640] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.729027] env[65503]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 600.729398] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c58db5c-a91e-47d3-acee-e69a8258ae7a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.738134] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 600.738395] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 600.739170] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85ad0c17-de1b-42c4-a0e1-93aeca2c0c08 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.746552] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 600.746552] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5271d424-e22d-530a-20cc-12a6388e6029" [ 600.746552] env[65503]: _type = "Task" [ 600.746552] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.755815] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5271d424-e22d-530a-20cc-12a6388e6029, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.887959] env[65503]: WARNING neutronclient.v2_0.client [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
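The Acquiring/Acquired/Releasing lock entries around "[datastore2] devstack-image-cache_base/d68ffece-..." above show oslo.concurrency serializing work on a single image-cache entry so that concurrent spawns do not race to populate the same cached VMDK. A minimal sketch of that pattern, assuming oslo.concurrency is installed, follows; the cache root, the empty placeholder write, and ensure_cached_image() are illustrative assumptions, not Nova's code.

    # Minimal sketch of the per-image locking seen in the entries above.
    # Assumptions: oslo.concurrency is installed; CACHE_ROOT and the empty
    # placeholder write stand in for the real "[datastore2] ..." fetch/copy.
    import os

    from oslo_concurrency import lockutils

    CACHE_ROOT = "/tmp/devstack-image-cache_base"

    def ensure_cached_image(image_id: str) -> str:
        """Return the cached image path, populating it at most once."""
        path = os.path.join(CACHE_ROOT, image_id)
        # The lock name mirrors the "[datastore2] devstack-image-cache_base/<id>"
        # lock names logged by lockutils above.
        with lockutils.lock("devstack-image-cache_base/%s" % image_id):
            if not os.path.exists(path):
                os.makedirs(CACHE_ROOT, exist_ok=True)
                with open(path, "wb") as f:
                    f.write(b"")  # placeholder for the real datastore copy
        return path

    if __name__ == "__main__":
        print(ensure_cached_image("d68ffece-ab91-4610-b535-fa1fb25ade93"))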
[ 600.888439] env[65503]: WARNING openstack [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 600.888803] env[65503]: WARNING openstack [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 600.950366] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449371, 'name': Rename_Task, 'duration_secs': 0.188848} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.950544] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 600.950881] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82ff03f6-dc5d-476b-aa03-ec0ccca4e4ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.958684] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 600.958684] env[65503]: value = "task-4449373" [ 600.958684] env[65503]: _type = "Task" [ 600.958684] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.967760] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449373, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.974399] env[65503]: DEBUG nova.network.neutron [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Successfully updated port: b5328355-0841-4441-9689-a82ea7088346 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 601.015343] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449372, 'name': Rename_Task, 'duration_secs': 0.176043} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.015627] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 601.015869] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a97e9e5a-3212-4559-8010-e4aa129a65dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.021861] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance eec6a484-ab00-402e-a369-c3009065c553 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 601.022299] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 601.022589] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '7', 'num_vm_building': '6', 'num_task_spawning': '5', 'num_os_type_None': '7', 'num_proj_771d20568f55445088cc06737c184615': '1', 'io_workload': '6', 'num_vm_active': '1', 'num_task_None': '2', 'num_proj_b6fd6581cea7434caff33b305a914d0d': '1', 'num_proj_c0f1a7c2c68a498c8ffa173e9778d59d': '1', 'num_proj_810a8d23b638450180bc37c0a952bcf4': '1', 'num_proj_5de0ae091db74426975a523e945110fa': '1', 'num_proj_9c519075bc624e7b90915354752765da': '1', 'num_proj_b98fe43dcbf04e7d9ad4ba82b5b64f73': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 601.027429] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for the task: (returnval){ [ 601.027429] env[65503]: value = "task-4449374" [ 601.027429] env[65503]: _type = "Task" [ 601.027429] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.037511] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449374, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.214052] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05079cec-1772-40d4-8a64-4878d6debbf5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.222283] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde9b282-bf8d-40e6-a188-4c0193813e78 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.260030] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a43a6f-9f4f-4fa2-aa57-10c4e487d50c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.272839] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d002c2-3e67-4558-94de-5839bcde67b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.277017] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5271d424-e22d-530a-20cc-12a6388e6029, 'name': SearchDatastore_Task, 'duration_secs': 0.014236} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.278393] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3745726-4a00-4081-8a62-7d3ce177d40a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.289230] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.296300] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 601.296300] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52df2bee-7f78-4f34-6527-26186200ce73" [ 601.296300] env[65503]: _type = "Task" [ 601.296300] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.309686] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52df2bee-7f78-4f34-6527-26186200ce73, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.439897] env[65503]: DEBUG nova.network.neutron [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Updating instance_info_cache with network_info: [{"id": "eb97a8c5-41a9-42ff-80fe-382fbcdc440a", "address": "fa:16:3e:0d:d9:4c", "network": {"id": "cb51b25b-9fc3-434c-8e76-3c58b174b65d", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2062279965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c519075bc624e7b90915354752765da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb97a8c5-41", "ovs_interfaceid": "eb97a8c5-41a9-42ff-80fe-382fbcdc440a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 601.470150] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449373, 'name': PowerOnVM_Task} progress is 1%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.476303] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquiring lock "refresh_cache-2dbc3860-c65c-4cbb-8d90-f1f74420e652" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.476496] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquired lock "refresh_cache-2dbc3860-c65c-4cbb-8d90-f1f74420e652" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.476687] env[65503]: DEBUG nova.network.neutron [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 601.539773] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449374, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.793543] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 601.808019] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52df2bee-7f78-4f34-6527-26186200ce73, 'name': SearchDatastore_Task, 'duration_secs': 0.012836} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.808296] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.808541] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 601.809106] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8899e88b-0027-4335-9003-e03d186c8ee8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.819384] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 601.819384] env[65503]: value = "task-4449375" [ 601.819384] env[65503]: _type = "Task" [ 601.819384] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.830995] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449375, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.946072] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Releasing lock "refresh_cache-f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.946072] env[65503]: DEBUG nova.compute.manager [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Instance network_info: |[{"id": "eb97a8c5-41a9-42ff-80fe-382fbcdc440a", "address": "fa:16:3e:0d:d9:4c", "network": {"id": "cb51b25b-9fc3-434c-8e76-3c58b174b65d", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2062279965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c519075bc624e7b90915354752765da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb97a8c5-41", "ovs_interfaceid": "eb97a8c5-41a9-42ff-80fe-382fbcdc440a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 601.946526] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:d9:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd72ef32-a57c-43b0-93df-e8a030987d44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb97a8c5-41a9-42ff-80fe-382fbcdc440a', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 601.958274] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Creating folder: Project (9c519075bc624e7b90915354752765da). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.959348] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-222db580-e855-421f-82da-524aaae9bf68 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.975075] env[65503]: DEBUG oslo_vmware.api [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449373, 'name': PowerOnVM_Task, 'duration_secs': 1.012885} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.975421] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 601.977041] env[65503]: INFO nova.compute.manager [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Took 11.71 seconds to spawn the instance on the hypervisor. [ 601.977041] env[65503]: DEBUG nova.compute.manager [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 601.977041] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643fdc9c-1060-4fcb-b379-5f8b6840b3ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.982697] env[65503]: WARNING openstack [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 601.983151] env[65503]: WARNING openstack [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 601.991126] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Created folder: Project (9c519075bc624e7b90915354752765da) in parent group-v870190. [ 601.991329] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Creating folder: Instances. Parent ref: group-v870206. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.992183] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12f666c2-d839-489e-84d8-3e52152d63cc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.003427] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Created folder: Instances in parent group-v870206. [ 602.003670] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 602.003859] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 602.004098] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e39524f-7e88-4244-9420-fb03d6617166 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.025779] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 602.025779] env[65503]: value = "task-4449378" [ 602.025779] env[65503]: _type = "Task" [ 602.025779] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.038413] env[65503]: DEBUG oslo_vmware.api [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449374, 'name': PowerOnVM_Task, 'duration_secs': 0.712598} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.041790] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 602.042053] env[65503]: INFO nova.compute.manager [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Took 9.96 seconds to spawn the instance on the hypervisor. [ 602.042194] env[65503]: DEBUG nova.compute.manager [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 602.042664] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449378, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.043254] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bebc4a-e80b-469b-96e4-95bbe94cf231 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.056811] env[65503]: DEBUG nova.network.neutron [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 602.112537] env[65503]: WARNING openstack [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 602.112930] env[65503]: WARNING openstack [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 602.205868] env[65503]: WARNING neutronclient.v2_0.client [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
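The recurring "Waiting for the task ... progress is N% ... completed successfully" entries come from polling vCenter task state until it reaches a terminal state. The sketch below shows only the general shape of such a loop; it is not oslo.vmware's wait_for_task, and TaskInfo, get_task_info(), and the simulated task at the bottom are assumptions for illustration.

    # Generic poll loop in the spirit of the "Waiting for the task ...
    # progress is N%" entries. Not oslo.vmware's implementation; TaskInfo and
    # get_task_info() are illustrative assumptions.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str        # "running", "success" or "error"
        progress: int     # 0-100
        error: str = ""

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a task until it finishes, reporting progress along the way."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError("task failed: %s" % info.error)
            print("progress is %d%%" % info.progress)
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)

    if __name__ == "__main__":
        # Simulate a task that reports 0% -> 25% -> success, much like the
        # CreateVM_Task progress entries above.
        states = iter([TaskInfo("running", 0), TaskInfo("running", 25),
                       TaskInfo("success", 100)])
        wait_for_task(lambda: next(states), poll_interval=0.01)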
[ 602.206183] env[65503]: WARNING openstack [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 602.206543] env[65503]: WARNING openstack [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 602.306280] env[65503]: DEBUG nova.network.neutron [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Updating instance_info_cache with network_info: [{"id": "b5328355-0841-4441-9689-a82ea7088346", "address": "fa:16:3e:bf:67:4a", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.136", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5328355-08", "ovs_interfaceid": "b5328355-0841-4441-9689-a82ea7088346", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 602.308638] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 602.308879] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.386s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.309580] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.841s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.311168] 
env[65503]: INFO nova.compute.claims [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 602.339344] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449375, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.371947] env[65503]: DEBUG nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Received event network-changed-c6e9e5bd-a5fa-4b57-9707-33c2c2106702 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 602.371947] env[65503]: DEBUG nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Refreshing instance network info cache due to event network-changed-c6e9e5bd-a5fa-4b57-9707-33c2c2106702. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 602.372370] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Acquiring lock "refresh_cache-c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.372513] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Acquired lock "refresh_cache-c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.372679] env[65503]: DEBUG nova.network.neutron [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Refreshing network info cache for port c6e9e5bd-a5fa-4b57-9707-33c2c2106702 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 602.395507] env[65503]: DEBUG nova.compute.manager [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Received event network-vif-plugged-eb97a8c5-41a9-42ff-80fe-382fbcdc440a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 602.395696] env[65503]: DEBUG oslo_concurrency.lockutils [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Acquiring lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.395882] env[65503]: DEBUG oslo_concurrency.lockutils [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.396769] env[65503]: DEBUG oslo_concurrency.lockutils [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.396769] env[65503]: DEBUG nova.compute.manager [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] No waiting events found dispatching network-vif-plugged-eb97a8c5-41a9-42ff-80fe-382fbcdc440a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 602.396769] env[65503]: WARNING nova.compute.manager [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Received unexpected event network-vif-plugged-eb97a8c5-41a9-42ff-80fe-382fbcdc440a for instance with vm_state building and task_state spawning. [ 602.396965] env[65503]: DEBUG nova.compute.manager [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Received event network-changed-eb97a8c5-41a9-42ff-80fe-382fbcdc440a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 602.397385] env[65503]: DEBUG nova.compute.manager [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Refreshing instance network info cache due to event network-changed-eb97a8c5-41a9-42ff-80fe-382fbcdc440a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 602.397667] env[65503]: DEBUG oslo_concurrency.lockutils [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Acquiring lock "refresh_cache-f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.397854] env[65503]: DEBUG oslo_concurrency.lockutils [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Acquired lock "refresh_cache-f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.398145] env[65503]: DEBUG nova.network.neutron [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Refreshing network info cache for port eb97a8c5-41a9-42ff-80fe-382fbcdc440a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 602.507432] env[65503]: INFO nova.compute.manager [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Took 18.11 seconds to build instance. [ 602.541538] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449378, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.561937] env[65503]: INFO nova.compute.manager [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Took 17.83 seconds to build instance. [ 602.764187] env[65503]: DEBUG nova.compute.manager [None req-13c8a6c1-512c-450d-81d4-8ed5ac483655 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 602.767858] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb4db43-9b4e-4336-ae2f-453eca05a7b7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.810896] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Releasing lock "refresh_cache-2dbc3860-c65c-4cbb-8d90-f1f74420e652" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.812878] env[65503]: DEBUG nova.compute.manager [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Instance network_info: |[{"id": "b5328355-0841-4441-9689-a82ea7088346", "address": "fa:16:3e:bf:67:4a", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.136", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5328355-08", "ovs_interfaceid": "b5328355-0841-4441-9689-a82ea7088346", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 602.815026] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:67:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5328355-0841-4441-9689-a82ea7088346', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 602.822174] env[65503]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Creating folder: Project (b98fe43dcbf04e7d9ad4ba82b5b64f73). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.824967] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6db43143-9e7e-4488-b78c-c6209b2134d3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.836616] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449375, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722193} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.836925] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 602.837204] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 602.837511] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c2d366f-439a-4f31-9320-ce2c52842661 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.844161] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Created folder: Project (b98fe43dcbf04e7d9ad4ba82b5b64f73) in parent group-v870190. [ 602.844161] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Creating folder: Instances. Parent ref: group-v870209. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.844485] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-026cde27-3ba2-4bd0-b0d2-4181504c3afb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.848977] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 602.848977] env[65503]: value = "task-4449380" [ 602.848977] env[65503]: _type = "Task" [ 602.848977] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.860774] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449380, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.862623] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Created folder: Instances in parent group-v870209. [ 602.863078] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 602.863390] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 602.863659] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a0a4ad3-6630-4968-bd92-ae227a1288ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.880264] env[65503]: WARNING neutronclient.v2_0.client [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 602.881100] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 602.881543] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 602.897542] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 602.897542] env[65503]: value = "task-4449382" [ 602.897542] env[65503]: _type = "Task" [ 602.897542] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.901322] env[65503]: WARNING neutronclient.v2_0.client [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
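Taken together, the tasks in these entries trace one spawn from the image cache: the cached VMDK is copied beside the instance (CopyVirtualDisk_Task), the root disk is extended (ExtendVirtualDisk_Task), the disk is attached via ReconfigVM_Task, the VM is renamed, and it is powered on. The schematic sketch below captures only that ordering; every helper name on the vc client is hypothetical and is not the vmwareapi driver's API.

    # Schematic ordering of the spawn steps visible in these entries. All
    # helper names on `vc` are hypothetical; only the sequence is from the log.
    def spawn_from_cached_image(vc, instance_uuid, image_id, root_gb):
        src = ("[datastore2] devstack-image-cache_base/"
               "%s/%s.vmdk" % (image_id, image_id))
        dst = "[datastore2] %s/%s.vmdk" % (instance_uuid, instance_uuid)

        vc.wait(vc.copy_virtual_disk(src, dst))                  # CopyVirtualDisk_Task
        vc.wait(vc.extend_virtual_disk(dst, root_gb))            # ExtendVirtualDisk_Task
        vc.wait(vc.reconfig_vm_attach_disk(instance_uuid, dst))  # ReconfigVM_Task
        vc.wait(vc.rename_vm(instance_uuid))                     # Rename_Task
        vc.wait(vc.power_on_vm(instance_uuid))                   # PowerOnVM_Task
        return dst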
[ 602.902201] env[65503]: WARNING openstack [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 602.903314] env[65503]: WARNING openstack [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 602.918679] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449382, 'name': CreateVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.009547] env[65503]: DEBUG oslo_concurrency.lockutils [None req-30172a5d-1a37-4be0-a7a8-087e051889a5 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.621s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 603.043906] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449378, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.065322] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beb3d50c-e157-4e8d-bf45-18acbc77cc2b tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.355s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 603.281875] env[65503]: INFO nova.compute.manager [None req-13c8a6c1-512c-450d-81d4-8ed5ac483655 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] instance snapshotting [ 603.282854] env[65503]: DEBUG nova.objects.instance [None req-13c8a6c1-512c-450d-81d4-8ed5ac483655 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lazy-loading 'flavor' on Instance uuid c6aecf44-9a23-47a2-b1aa-6530b4119b1d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 603.364705] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449380, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074799} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.365109] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 603.366243] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1691bc55-5564-433b-92ca-42351a689f88 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.392733] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 603.396854] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf83d33b-316f-42aa-9344-eff3239b8827 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.423372] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449382, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.425012] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 603.425012] env[65503]: value = "task-4449383" [ 603.425012] env[65503]: _type = "Task" [ 603.425012] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.438902] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449383, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.545975] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449378, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.577541] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd80124-9673-4fd5-add1-bdd99638fb80 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.585544] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db755b84-3d0e-45be-ba7c-badef821b7e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.624189] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 603.624597] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 603.633275] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9cb916-950e-4b59-89e7-7c507a0a236b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.642352] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbe5606-8f0e-4cd5-bfd8-bdc446ac0128 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.658783] env[65503]: DEBUG nova.compute.provider_tree [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.664388] env[65503]: WARNING openstack [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 603.664388] env[65503]: WARNING openstack [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 603.671124] env[65503]: DEBUG nova.scheduler.client.report [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 603.793974] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f4044f-fe97-4abe-97b6-62e528cd6dbb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.797889] env[65503]: WARNING neutronclient.v2_0.client [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 603.798611] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 603.800254] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 603.827643] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99fbb36-9fc6-4b1d-aec2-18b31c3b531d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.853323] env[65503]: WARNING neutronclient.v2_0.client [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 603.853935] env[65503]: WARNING openstack [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 603.854279] env[65503]: WARNING openstack [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 603.925433] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449382, 'name': CreateVM_Task, 'duration_secs': 0.623936} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.925433] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 603.925433] env[65503]: WARNING neutronclient.v2_0.client [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 603.926178] env[65503]: DEBUG oslo_vmware.service [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6bd27f-f371-4a99-898e-269006861937 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.939443] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.939668] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.940021] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 603.941230] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dc49c0a-67b2-4aac-8335-8ef8105fee2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.946474] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449383, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.949495] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 603.949495] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d99551-de61-eb3a-3cd1-26207cf790f9" [ 603.949495] env[65503]: _type = "Task" [ 603.949495] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.958347] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d99551-de61-eb3a-3cd1-26207cf790f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.042330] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449378, 'name': CreateVM_Task, 'duration_secs': 1.688552} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.042510] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 604.043303] env[65503]: WARNING neutronclient.v2_0.client [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 604.043833] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.067748] env[65503]: DEBUG nova.network.neutron [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Updated VIF entry in instance network info cache for port c6e9e5bd-a5fa-4b57-9707-33c2c2106702. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 604.068154] env[65503]: DEBUG nova.network.neutron [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Updating instance_info_cache with network_info: [{"id": "c6e9e5bd-a5fa-4b57-9707-33c2c2106702", "address": "fa:16:3e:c5:e0:04", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e9e5bd-a5", "ovs_interfaceid": "c6e9e5bd-a5fa-4b57-9707-33c2c2106702", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 604.111369] env[65503]: DEBUG nova.network.neutron [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Updated VIF entry in instance network info cache for port eb97a8c5-41a9-42ff-80fe-382fbcdc440a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 604.111369] env[65503]: DEBUG nova.network.neutron [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Updating instance_info_cache with network_info: [{"id": "eb97a8c5-41a9-42ff-80fe-382fbcdc440a", "address": "fa:16:3e:0d:d9:4c", "network": {"id": "cb51b25b-9fc3-434c-8e76-3c58b174b65d", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2062279965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c519075bc624e7b90915354752765da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb97a8c5-41", "ovs_interfaceid": "eb97a8c5-41a9-42ff-80fe-382fbcdc440a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 604.178555] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.867s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.178555] env[65503]: DEBUG nova.compute.manager [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 604.180077] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.405s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.181566] env[65503]: INFO nova.compute.claims [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 604.343835] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-13c8a6c1-512c-450d-81d4-8ed5ac483655 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 604.344591] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ec01fe98-c799-4623-912b-7b46d08f7721 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.353499] env[65503]: DEBUG oslo_vmware.api [None req-13c8a6c1-512c-450d-81d4-8ed5ac483655 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 604.353499] env[65503]: value = "task-4449384" [ 604.353499] env[65503]: _type = "Task" [ 604.353499] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.367432] env[65503]: DEBUG oslo_vmware.api [None req-13c8a6c1-512c-450d-81d4-8ed5ac483655 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449384, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.446240] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449383, 'name': ReconfigVM_Task, 'duration_secs': 0.881599} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.446240] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Reconfigured VM instance instance-00000005 to attach disk [datastore2] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 604.446240] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3af58c92-6d4b-4d69-8eec-0a5779778a4f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.457417] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 604.457417] env[65503]: value = "task-4449385" [ 604.457417] env[65503]: _type = "Task" [ 604.457417] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.465323] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.465577] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 604.465812] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.465944] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.466122] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 604.466458] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 
tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.466693] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 604.466923] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13dd0db8-18fb-42a3-b8d8-a1f44a20ed1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.472411] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44d71458-4ea6-4c18-99e4-550e34284896 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.474710] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449385, 'name': Rename_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.480313] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for the task: (returnval){ [ 604.480313] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c84f72-b411-96c7-33f5-8024371f9e09" [ 604.480313] env[65503]: _type = "Task" [ 604.480313] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.484567] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 604.484763] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 604.487282] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715cef33-5e7b-4f42-8063-d475e62e83aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.492014] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c84f72-b411-96c7-33f5-8024371f9e09, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.497360] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-616c1000-26ec-4ca6-8c41-55fdf9519492 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.504063] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 604.504063] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5231edf2-7cf7-f80f-d2fc-ea33fee83c01" [ 604.504063] env[65503]: _type = "Task" [ 604.504063] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.513800] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5231edf2-7cf7-f80f-d2fc-ea33fee83c01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.570650] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Releasing lock "refresh_cache-c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.571094] env[65503]: DEBUG nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Received event network-vif-plugged-2afefd06-f1b1-4227-b8ea-28c0ccdf5b69 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 604.571907] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Acquiring lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.571907] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.571907] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.572181] env[65503]: DEBUG nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] No waiting events found dispatching network-vif-plugged-2afefd06-f1b1-4227-b8ea-28c0ccdf5b69 {{(pid=65503) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 604.572444] env[65503]: WARNING nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Received unexpected event network-vif-plugged-2afefd06-f1b1-4227-b8ea-28c0ccdf5b69 for instance with vm_state active and task_state None. [ 604.572652] env[65503]: DEBUG nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Received event network-changed-2afefd06-f1b1-4227-b8ea-28c0ccdf5b69 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 604.575054] env[65503]: DEBUG nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Refreshing instance network info cache due to event network-changed-2afefd06-f1b1-4227-b8ea-28c0ccdf5b69. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 604.575054] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Acquiring lock "refresh_cache-02b3b44e-96bb-47a0-8aa0-7026d987cad8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.575054] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Acquired lock "refresh_cache-02b3b44e-96bb-47a0-8aa0-7026d987cad8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.575054] env[65503]: DEBUG nova.network.neutron [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Refreshing network info cache for port 2afefd06-f1b1-4227-b8ea-28c0ccdf5b69 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 604.614351] env[65503]: DEBUG oslo_concurrency.lockutils [req-2d6c2d04-2188-4037-bce1-f835f5f598ba req-e1749b78-d268-47e5-8d27-6dab430edf68 service nova] Releasing lock "refresh_cache-f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.688796] env[65503]: DEBUG nova.compute.utils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 604.693441] env[65503]: DEBUG nova.compute.manager [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 604.696065] env[65503]: DEBUG nova.network.neutron [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 604.696065] env[65503]: WARNING neutronclient.v2_0.client [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 604.698865] env[65503]: WARNING neutronclient.v2_0.client [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 604.698865] env[65503]: WARNING openstack [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 604.699493] env[65503]: WARNING openstack [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 604.788599] env[65503]: DEBUG nova.policy [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ddb7dc8858e4aa09c61dc232cb465eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5de0ae091db74426975a523e945110fa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 604.826638] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquiring lock "a1908e71-31f9-4308-b4d6-7908d3208c5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.826638] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Lock "a1908e71-31f9-4308-b4d6-7908d3208c5a" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.865883] env[65503]: DEBUG oslo_vmware.api [None req-13c8a6c1-512c-450d-81d4-8ed5ac483655 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449384, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.970113] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449385, 'name': Rename_Task, 'duration_secs': 0.162065} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.970435] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 604.970678] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c30ca67b-ee3f-4f01-a52f-43f00c0092be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.979840] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 604.979840] env[65503]: value = "task-4449386" [ 604.979840] env[65503]: _type = "Task" [ 604.979840] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.993978] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449386, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.002275] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.003053] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 605.003053] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.019296] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Preparing fetch location {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 605.019777] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Creating directory with path [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 605.020466] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e73be765-d1f8-4c75-9bfe-3d3e668e7650 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.040316] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Created directory with path [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 605.040654] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Fetch image to [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 605.040840] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Downloading image file 
data d68ffece-ab91-4610-b535-fa1fb25ade93 to [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk on the data store datastore1 {{(pid=65503) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 605.041746] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a25acd-5794-442f-a4a7-7b3d7ba027c6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.053716] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f33c49b-5176-48ee-8460-d2b1f6f586bd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.066596] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ef436d-7098-4715-a012-a4be5871cce5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.110597] env[65503]: WARNING neutronclient.v2_0.client [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 605.111329] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 605.111673] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 605.120532] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7654e517-c142-49ba-b723-5f27cc2bc6a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.130470] env[65503]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-807bbc1a-1af5-4d7f-b15e-bfa5155b3f65 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.156217] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Downloading image file data d68ffece-ab91-4610-b535-fa1fb25ade93 to the data store datastore1 {{(pid=65503) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 605.212967] env[65503]: DEBUG nova.compute.manager [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 605.261453] env[65503]: DEBUG oslo_vmware.rw_handles [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=65503) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 605.327589] env[65503]: DEBUG nova.network.neutron [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Successfully created port: cde44ebc-2cc2-4df5-9886-d3766e7f4bb9 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 605.332578] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquiring lock "c6aecf44-9a23-47a2-b1aa-6530b4119b1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.332840] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lock "c6aecf44-9a23-47a2-b1aa-6530b4119b1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.332992] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquiring lock "c6aecf44-9a23-47a2-b1aa-6530b4119b1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.333180] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lock "c6aecf44-9a23-47a2-b1aa-6530b4119b1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.333329] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lock "c6aecf44-9a23-47a2-b1aa-6530b4119b1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.335442] env[65503]: INFO nova.compute.manager [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 
tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Terminating instance [ 605.340336] env[65503]: DEBUG nova.compute.manager [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 605.367702] env[65503]: DEBUG oslo_vmware.api [None req-13c8a6c1-512c-450d-81d4-8ed5ac483655 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449384, 'name': CreateSnapshot_Task, 'duration_secs': 0.616736} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.370726] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-13c8a6c1-512c-450d-81d4-8ed5ac483655 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 605.372752] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4edd346-5444-46b7-9bf9-36fc5d362845 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.449878] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.450054] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.492477] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449386, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.636925] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 605.638045] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 605.662042] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf1048b-7b8d-47c1-a0ae-04d16d8cd894 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.672665] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9143f9b-f9f0-4ef0-bd45-bd3562ba31d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.714041] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3a5a3d-dd71-4997-ad24-5a1013e15a9d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.723911] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07b74b9-fbda-4db7-b710-2211b93adf1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.744511] env[65503]: DEBUG nova.compute.provider_tree [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.846367] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquiring lock "refresh_cache-c6aecf44-9a23-47a2-b1aa-6530b4119b1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.846622] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquired lock "refresh_cache-c6aecf44-9a23-47a2-b1aa-6530b4119b1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.847913] env[65503]: DEBUG nova.network.neutron [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 605.872462] env[65503]: DEBUG 
oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.892375] env[65503]: DEBUG nova.compute.manager [None req-13c8a6c1-512c-450d-81d4-8ed5ac483655 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Instance disappeared during snapshot {{(pid=65503) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4635}} [ 605.957488] env[65503]: DEBUG nova.compute.manager [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 605.972927] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Acquiring lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.973839] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.973839] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Acquiring lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.973839] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.973839] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.976122] env[65503]: INFO nova.compute.manager [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd 
tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Terminating instance [ 605.993150] env[65503]: DEBUG oslo_vmware.api [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449386, 'name': PowerOnVM_Task, 'duration_secs': 0.714335} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.993525] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 605.994258] env[65503]: INFO nova.compute.manager [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Took 11.62 seconds to spawn the instance on the hypervisor. [ 605.994258] env[65503]: DEBUG nova.compute.manager [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 605.995228] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0862c2-b165-4cde-913d-33d201e9a2e3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.070340] env[65503]: DEBUG oslo_vmware.rw_handles [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Completed reading data from the image iterator. {{(pid=65503) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 606.070340] env[65503]: DEBUG oslo_vmware.rw_handles [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 606.114155] env[65503]: WARNING neutronclient.v2_0.client [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
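The PowerOnVM_Task and CreateSnapshot_Task entries above follow oslo.vmware's submit-then-poll pattern: the driver invokes a vCenter task method, then wait_for_task polls it, which produces the intermediate "progress is N%" lines and the final duration_secs on completion. A minimal sketch of that pattern, assuming an already-established VMwareAPISession named session and a VM managed-object reference vm_ref (both placeholders, not values from this log):

    # Submit a vCenter task and block until it completes; oslo.vmware's
    # poll loop is what emits the "progress is N%" and duration_secs lines.
    def power_on(session, vm_ref):
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        return session.wait_for_task(task)
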
[ 606.115503] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 606.115927] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 606.157518] env[65503]: DEBUG nova.compute.manager [None req-13c8a6c1-512c-450d-81d4-8ed5ac483655 tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Found 0 images (rotation: 2) {{(pid=65503) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5057}} [ 606.218209] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Downloaded image file data d68ffece-ab91-4610-b535-fa1fb25ade93 to vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk on the data store datastore1 {{(pid=65503) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 606.220441] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Caching image {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 606.220568] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Copying Virtual Disk [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk to [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 606.220867] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de1c8c64-f923-43bf-a91b-e5599e3a9165 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.235917] env[65503]: DEBUG nova.compute.manager [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 606.239131] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 606.239131] env[65503]: value = "task-4449387" [ 606.239131] env[65503]: _type = "Task" [ 606.239131] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.252206] env[65503]: DEBUG nova.scheduler.client.report [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 606.262255] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449387, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.280611] env[65503]: DEBUG nova.virt.hardware [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 606.280611] env[65503]: DEBUG nova.virt.hardware [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 606.280611] env[65503]: DEBUG nova.virt.hardware [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 606.280913] env[65503]: DEBUG nova.virt.hardware [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor pref 0:0:0 
{{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 606.280913] env[65503]: DEBUG nova.virt.hardware [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 606.280913] env[65503]: DEBUG nova.virt.hardware [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 606.280913] env[65503]: DEBUG nova.virt.hardware [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 606.280913] env[65503]: DEBUG nova.virt.hardware [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 606.281162] env[65503]: DEBUG nova.virt.hardware [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 606.281162] env[65503]: DEBUG nova.virt.hardware [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 606.281162] env[65503]: DEBUG nova.virt.hardware [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 606.281410] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73833f2-ae9e-43eb-8a2e-5122dbc86845 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.292029] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3583c849-9349-4774-8820-92b15a0e082a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.350625] env[65503]: WARNING neutronclient.v2_0.client [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 606.350875] env[65503]: WARNING openstack [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 606.351192] env[65503]: WARNING openstack [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 606.379219] env[65503]: DEBUG nova.network.neutron [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Updated VIF entry in instance network info cache for port 2afefd06-f1b1-4227-b8ea-28c0ccdf5b69. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 606.379648] env[65503]: DEBUG nova.network.neutron [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Updating instance_info_cache with network_info: [{"id": "2afefd06-f1b1-4227-b8ea-28c0ccdf5b69", "address": "fa:16:3e:3d:66:35", "network": {"id": "63d8b798-8291-48f6-9bf6-ae2496aba792", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-193093434-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "810a8d23b638450180bc37c0a952bcf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2afefd06-f1", "ovs_interfaceid": "2afefd06-f1b1-4227-b8ea-28c0ccdf5b69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 606.447099] env[65503]: DEBUG nova.compute.manager [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Received event network-vif-plugged-b5328355-0841-4441-9689-a82ea7088346 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 606.447099] env[65503]: DEBUG oslo_concurrency.lockutils [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Acquiring lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.451732] env[65503]: DEBUG oslo_concurrency.lockutils [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.452082] env[65503]: DEBUG oslo_concurrency.lockutils [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.005s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.452373] env[65503]: DEBUG nova.compute.manager [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] No waiting events found dispatching network-vif-plugged-b5328355-0841-4441-9689-a82ea7088346 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 606.452584] env[65503]: WARNING nova.compute.manager [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Received unexpected event network-vif-plugged-b5328355-0841-4441-9689-a82ea7088346 for instance with vm_state building and task_state spawning. [ 606.452739] env[65503]: DEBUG nova.compute.manager [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Received event network-changed-b5328355-0841-4441-9689-a82ea7088346 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 606.452880] env[65503]: DEBUG nova.compute.manager [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Refreshing instance network info cache due to event network-changed-b5328355-0841-4441-9689-a82ea7088346. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 606.453348] env[65503]: DEBUG oslo_concurrency.lockutils [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Acquiring lock "refresh_cache-2dbc3860-c65c-4cbb-8d90-f1f74420e652" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.453348] env[65503]: DEBUG oslo_concurrency.lockutils [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Acquired lock "refresh_cache-2dbc3860-c65c-4cbb-8d90-f1f74420e652" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.453459] env[65503]: DEBUG nova.network.neutron [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Refreshing network info cache for port b5328355-0841-4441-9689-a82ea7088346 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 606.483514] env[65503]: DEBUG nova.compute.manager [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 606.484256] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 606.485986] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1bbdc1-17a4-4bf4-81a3-ba07d234d203 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.490484] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.497712] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 606.498136] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b90b690-3346-4c65-82c3-7dd4aef24891 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.507165] env[65503]: DEBUG oslo_vmware.api [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Waiting for the task: (returnval){ [ 606.507165] env[65503]: value = "task-4449388" [ 606.507165] env[65503]: _type = "Task" [ 606.507165] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.517324] env[65503]: DEBUG nova.network.neutron [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 606.524184] env[65503]: DEBUG oslo_concurrency.lockutils [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquiring lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.524428] env[65503]: DEBUG oslo_concurrency.lockutils [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.524622] env[65503]: DEBUG oslo_concurrency.lockutils [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquiring lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.524816] env[65503]: DEBUG oslo_concurrency.lockutils [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.524978] env[65503]: DEBUG oslo_concurrency.lockutils [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.532636] env[65503]: INFO nova.compute.manager [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Took 19.24 seconds to build instance. [ 606.533674] env[65503]: DEBUG oslo_vmware.api [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Task: {'id': task-4449388, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.539429] env[65503]: INFO nova.compute.manager [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Terminating instance [ 606.759675] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449387, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.764214] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.766224] env[65503]: DEBUG nova.compute.manager [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 606.773534] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.956s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.773534] env[65503]: INFO nova.compute.claims [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 606.882265] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Releasing lock "refresh_cache-02b3b44e-96bb-47a0-8aa0-7026d987cad8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.882748] env[65503]: DEBUG nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Received event network-vif-plugged-6164c166-4054-4e4a-93fb-6e84abe74f7d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 606.883376] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Acquiring lock "ad85eef0-cef7-4900-b193-1737a6c2f17b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.883501] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Lock 
"ad85eef0-cef7-4900-b193-1737a6c2f17b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.883740] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Lock "ad85eef0-cef7-4900-b193-1737a6c2f17b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.883925] env[65503]: DEBUG nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] No waiting events found dispatching network-vif-plugged-6164c166-4054-4e4a-93fb-6e84abe74f7d {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 606.884105] env[65503]: WARNING nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Received unexpected event network-vif-plugged-6164c166-4054-4e4a-93fb-6e84abe74f7d for instance with vm_state building and task_state spawning. [ 606.884260] env[65503]: DEBUG nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Received event network-changed-6164c166-4054-4e4a-93fb-6e84abe74f7d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 606.884418] env[65503]: DEBUG nova.compute.manager [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Refreshing instance network info cache due to event network-changed-6164c166-4054-4e4a-93fb-6e84abe74f7d. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 606.884633] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Acquiring lock "refresh_cache-ad85eef0-cef7-4900-b193-1737a6c2f17b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.884759] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Acquired lock "refresh_cache-ad85eef0-cef7-4900-b193-1737a6c2f17b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.884904] env[65503]: DEBUG nova.network.neutron [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Refreshing network info cache for port 6164c166-4054-4e4a-93fb-6e84abe74f7d {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 606.956176] env[65503]: WARNING neutronclient.v2_0.client [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 606.956993] env[65503]: WARNING openstack [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 606.957126] env[65503]: WARNING openstack [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 607.017726] env[65503]: DEBUG oslo_vmware.api [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Task: {'id': task-4449388, 'name': PowerOffVM_Task, 'duration_secs': 0.26617} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.018507] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 607.018507] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 607.018973] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4bf1f09-414f-4cb6-a3e6-1b791e554822 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.035322] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a2bf63f4-edab-4d80-86a2-5499de199bec tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "ad85eef0-cef7-4900-b193-1737a6c2f17b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.747s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.045680] env[65503]: DEBUG nova.compute.manager [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 607.045952] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 607.047215] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2f8901-39e2-4e04-8848-c648de86e377 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.058875] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 607.059422] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7e0be30-36b5-4240-98e5-51cde7be8b61 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.068318] env[65503]: DEBUG oslo_vmware.api [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for the task: (returnval){ [ 607.068318] env[65503]: value = "task-4449390" [ 607.068318] env[65503]: _type = "Task" [ 607.068318] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.077195] env[65503]: DEBUG oslo_vmware.api [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449390, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.098531] env[65503]: DEBUG nova.network.neutron [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Successfully updated port: cde44ebc-2cc2-4df5-9886-d3766e7f4bb9 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 607.103488] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 607.103789] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 607.103975] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Deleting the datastore file [datastore2] c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 607.104670] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0793f924-f24e-48f7-b9a6-deea088c8d53 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.115106] env[65503]: DEBUG oslo_vmware.api [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Waiting for the task: (returnval){ [ 607.115106] env[65503]: value = "task-4449391" [ 607.115106] env[65503]: _type = "Task" [ 607.115106] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.131587] env[65503]: DEBUG oslo_vmware.api [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Task: {'id': task-4449391, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.224993] env[65503]: DEBUG nova.network.neutron [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 607.258328] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449387, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.277637] env[65503]: DEBUG nova.compute.utils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 607.286273] env[65503]: DEBUG nova.compute.manager [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Not allocating networking since 'none' was specified. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 607.331820] env[65503]: WARNING openstack [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 607.333505] env[65503]: WARNING openstack [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 607.390935] env[65503]: WARNING neutronclient.v2_0.client [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 607.391813] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 607.392219] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 607.487044] env[65503]: WARNING neutronclient.v2_0.client [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 607.487728] env[65503]: WARNING openstack [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 607.488109] env[65503]: WARNING openstack [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 607.585361] env[65503]: DEBUG oslo_vmware.api [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449390, 'name': PowerOffVM_Task, 'duration_secs': 0.219959} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.585585] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 607.585852] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 607.587703] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e55ac0cc-ecc2-4151-939e-fcb03d3a41bd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.608496] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "refresh_cache-1e92795e-cf30-4175-9e31-c29278f3e9e0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.608496] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "refresh_cache-1e92795e-cf30-4175-9e31-c29278f3e9e0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.608496] env[65503]: DEBUG nova.network.neutron [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 607.628984] env[65503]: DEBUG oslo_vmware.api [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Task: {'id': 
task-4449391, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.380407} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.630018] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 607.630452] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 607.630452] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 607.630633] env[65503]: INFO nova.compute.manager [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Took 1.15 seconds to destroy the instance on the hypervisor. [ 607.630876] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 607.631085] env[65503]: DEBUG nova.compute.manager [-] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 607.631178] env[65503]: DEBUG nova.network.neutron [-] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 607.631435] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
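The teardown of instance c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3 above proceeds in the order PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task before the network is deallocated. A rough sketch of that call order against the vSphere API, assuming a live session, the VM's moref vm_ref, the FileManager moref file_manager, a datastore path string ds_path, and a datacenter moref dc_ref (all placeholders):

    def destroy_vm(session, vm_ref, file_manager, ds_path, dc_ref):
        # Power off first; this is a vCenter task and must be waited on.
        session.wait_for_task(
            session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref))
        # UnregisterVM only removes the VM from inventory; it is not a task.
        session.invoke_api(session.vim, "UnregisterVM", vm_ref)
        # Finally delete the VM's directory from the datastore.
        session.wait_for_task(
            session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                               file_manager, name=ds_path, datacenter=dc_ref))
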
[ 607.632116] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 607.633299] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 607.671090] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 607.671090] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 607.671090] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Deleting the datastore file [datastore2] 02b3b44e-96bb-47a0-8aa0-7026d987cad8 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 607.671090] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c60bdbbd-cfdf-4bef-8360-51edca917acc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.685546] env[65503]: DEBUG oslo_vmware.api [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for the task: (returnval){ [ 607.685546] env[65503]: value = "task-4449393" [ 607.685546] env[65503]: _type = "Task" [ 607.685546] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.703920] env[65503]: DEBUG oslo_vmware.api [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449393, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.709995] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 607.710687] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 607.728710] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Releasing lock "refresh_cache-c6aecf44-9a23-47a2-b1aa-6530b4119b1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.729383] env[65503]: DEBUG nova.compute.manager [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 607.729669] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 607.731064] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a082b0-722a-4cde-8c8b-f4f2ead20105 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.744441] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 607.744660] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac7b37a0-c622-4fd3-801a-c181ea17a8a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.761528] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449387, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.336485} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.764110] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Copied Virtual Disk [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk to [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 607.764423] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Deleting the datastore file [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93/tmp-sparse.vmdk {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 607.764926] env[65503]: DEBUG oslo_vmware.api [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 607.764926] env[65503]: value = "task-4449394" [ 607.764926] env[65503]: _type = "Task" [ 607.764926] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.765390] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-850071cb-b6ee-4982-9d6c-718f5957267e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.777370] env[65503]: DEBUG nova.network.neutron [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Updated VIF entry in instance network info cache for port b5328355-0841-4441-9689-a82ea7088346. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 607.777825] env[65503]: DEBUG nova.network.neutron [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Updating instance_info_cache with network_info: [{"id": "b5328355-0841-4441-9689-a82ea7088346", "address": "fa:16:3e:bf:67:4a", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.136", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5328355-08", "ovs_interfaceid": "b5328355-0841-4441-9689-a82ea7088346", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 607.786671] env[65503]: DEBUG oslo_vmware.api [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449394, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.788812] env[65503]: DEBUG nova.compute.manager [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 607.796552] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 607.796552] env[65503]: value = "task-4449395" [ 607.796552] env[65503]: _type = "Task" [ 607.796552] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.808428] env[65503]: DEBUG nova.compute.manager [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Stashing vm_state: active {{(pid=65503) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 607.818681] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449395, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031511} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.818990] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 607.819304] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Moving file from [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d/d68ffece-ab91-4610-b535-fa1fb25ade93 to [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93. {{(pid=65503) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 607.819589] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-7bd15e9c-2784-44c8-945d-d4a55a32521f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.824913] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 607.832034] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 607.832034] env[65503]: value = "task-4449396" [ 607.832034] env[65503]: _type = "Task" [ 607.832034] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.845667] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449396, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.105138] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64093db4-22d7-408e-b5b0-fec96be3a2bc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.113246] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806d18b2-89be-464c-a0e4-064357406178 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.117530] env[65503]: WARNING openstack [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 608.118578] env[65503]: WARNING openstack [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 608.160501] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2e8ac5-78aa-468e-8336-5fb6efa5657f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.170232] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6178a6ca-6366-49b9-a19b-0010142e1b6e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.189231] env[65503]: DEBUG nova.compute.provider_tree [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 608.204133] env[65503]: DEBUG oslo_vmware.api [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Task: {'id': task-4449393, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267214} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.204133] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 608.204221] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 608.204338] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.204479] env[65503]: INFO nova.compute.manager [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Took 1.16 seconds to destroy the instance on the hypervisor. [ 608.205371] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 608.205371] env[65503]: DEBUG nova.compute.manager [-] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 608.205371] env[65503]: DEBUG nova.network.neutron [-] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 608.205371] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
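The recurring "Disabling service 'block-storage'/'key-manager'" warnings in this run are openstacksdk probing the [cinder] and [barbican] groups of the service's oslo.config namespace and finding no valid_interfaces option registered there. A minimal sketch of that failure mode, assuming only oslo.config and keystoneauth1 are installed; this is not Nova's code, and catalog_info simply stands in for whichever options the group already carries:

    from oslo_config import cfg
    from keystoneauth1 import loading as ks_loading

    conf = cfg.ConfigOpts()
    # The [cinder] group exists because some options are registered in it...
    conf.register_opts([cfg.StrOpt('catalog_info')], group='cinder')

    try:
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        # -> "no such option valid_interfaces in group [cinder]", the exact
        #    text wrapped by the warnings in this log.
        print(exc)

    # Registering keystoneauth's adapter options for the group adds
    # valid_interfaces (and related options), after which the lookup works.
    ks_loading.register_adapter_conf_options(conf, 'cinder')
    print(conf.cinder.valid_interfaces)  # None unless set in the config file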
[ 608.205734] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 608.205995] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 608.268869] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "1c598208-a4d0-46b8-9a9c-107353e957b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.269101] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "1c598208-a4d0-46b8-9a9c-107353e957b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.282513] env[65503]: DEBUG oslo_concurrency.lockutils [req-b2ac2a46-85f8-45a2-be7a-b091ececd978 req-64d34acb-5de9-41b9-9032-b54e387a0e69 service nova] Releasing lock "refresh_cache-2dbc3860-c65c-4cbb-8d90-f1f74420e652" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.283096] env[65503]: DEBUG oslo_vmware.api [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449394, 'name': PowerOffVM_Task, 'duration_secs': 0.201742} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.283218] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 608.283979] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 608.284138] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c829c292-bb35-47d0-94e7-39623b046922 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.329305] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 608.330753] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 608.330753] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Deleting the datastore file [datastore2] c6aecf44-9a23-47a2-b1aa-6530b4119b1d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 608.330753] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-760a8cda-77cc-4219-9b7f-a0e4ed56492a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.341726] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.344041] env[65503]: DEBUG oslo_vmware.api [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for the task: (returnval){ [ 608.344041] env[65503]: value = "task-4449398" [ 608.344041] env[65503]: _type = "Task" [ 608.344041] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.351824] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449396, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.034358} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.352573] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] File moved {{(pid=65503) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 608.352865] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Cleaning up location [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 608.353066] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Deleting the datastore file [datastore1] vmware_temp/39b0e7ca-4426-4793-9619-5a1fd00f637d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 608.353343] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a12d61e-7e18-4055-878e-091b7e41e6e3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.358364] env[65503]: DEBUG oslo_vmware.api [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449398, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.365969] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 608.365969] env[65503]: value = "task-4449399" [ 608.365969] env[65503]: _type = "Task" [ 608.365969] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.377561] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449399, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.448599] env[65503]: WARNING neutronclient.v2_0.client [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
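The Task entries above (DeleteDatastoreFile_Task, MoveDatastoreFile_Task, the "progress is 0%" and "completed successfully" polls) all follow the same oslo.vmware pattern: invoke a *_Task method through the session, then block on wait_for_task. A hedged sketch of that pattern; the hostname, credentials, datastore path and datacenter reference are placeholders, not values from this deployment:

    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'svc-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    dc_ref = None  # in practice: the Datacenter managed object reference

    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] vmware_temp/some-temp-dir', datacenter=dc_ref)

    # wait_for_task polls at task_poll_interval (the repeated "progress is N%"
    # lines) and raises if the task finishes in an error state.
    task_info = session.wait_for_task(task)
    print(task_info.state)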
[ 608.449338] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 608.449800] env[65503]: WARNING openstack [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 608.485055] env[65503]: DEBUG nova.network.neutron [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 608.590013] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 608.647356] env[65503]: WARNING openstack [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 608.647918] env[65503]: WARNING openstack [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 608.699061] env[65503]: DEBUG nova.scheduler.client.report [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 608.775496] env[65503]: DEBUG nova.compute.manager [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 608.780030] env[65503]: DEBUG nova.network.neutron [-] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 608.803727] env[65503]: DEBUG nova.compute.manager [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 608.836399] env[65503]: DEBUG nova.virt.hardware [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 608.836882] env[65503]: DEBUG nova.virt.hardware [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 608.836882] env[65503]: DEBUG nova.virt.hardware [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 608.837071] env[65503]: DEBUG nova.virt.hardware [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 608.837129] env[65503]: DEBUG nova.virt.hardware [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 608.837360] env[65503]: DEBUG nova.virt.hardware [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 608.837460] env[65503]: DEBUG nova.virt.hardware [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 608.837574] env[65503]: DEBUG nova.virt.hardware [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 608.837732] env[65503]: DEBUG nova.virt.hardware [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 608.837884] env[65503]: DEBUG nova.virt.hardware [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 608.838401] env[65503]: DEBUG nova.virt.hardware [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 608.839581] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86d67e6-d4cb-46bf-bbb2-87bb768a4709 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.857862] env[65503]: DEBUG oslo_vmware.api [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Task: {'id': task-4449398, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.344947} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.860247] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 608.860589] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 608.860676] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.860795] env[65503]: INFO nova.compute.manager [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Took 1.13 seconds to destroy the instance on the hypervisor. 
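The hardware.py lines above walk from the flavor/image limits (65536 sockets, cores and threads each, no preference) to the set of possible CPU topologies for one vCPU. An illustrative re-statement of that enumeration step, not Nova's implementation: every sockets*cores*threads factorisation of the vCPU count that stays within the limits.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product is vcpus.
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -> "Got 1 possible topologies"
    print(possible_topologies(4))   # e.g. (1, 1, 4), (1, 2, 2), (2, 2, 1), (4, 1, 1), ...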
[ 608.861075] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 608.861337] env[65503]: DEBUG nova.compute.manager [-] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 608.861448] env[65503]: DEBUG nova.network.neutron [-] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 608.861758] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 608.862334] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 608.862580] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 608.870960] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96793081-977f-4928-86af-bbcb65d2d11e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.884766] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449399, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033098} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.894790] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 608.899021] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 608.901919] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Creating folder: Project (fece07f09a9349d2855b98011a732c31). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.902284] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49571774-8c42-4852-9752-f8202207458e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.906650] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d9d5c18-89c2-46c7-8f2a-1876848030e7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.910993] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 608.910993] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5263c6a2-5dbe-1609-4377-9f8e5ee59d72" [ 608.910993] env[65503]: _type = "Task" [ 608.910993] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.920624] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5263c6a2-5dbe-1609-4377-9f8e5ee59d72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.922419] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Created folder: Project (fece07f09a9349d2855b98011a732c31) in parent group-v870190. [ 608.922419] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Creating folder: Instances. Parent ref: group-v870213. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.922643] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-293347e1-d99c-43bd-ad3b-9db35f33a1f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.937363] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Created folder: Instances in parent group-v870213. [ 608.937363] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 608.937662] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 608.938016] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3db2a95-ae80-4ae6-9e54-6a554bb6c220 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.969402] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 608.969402] env[65503]: value = "task-4449402" [ 608.969402] env[65503]: _type = "Task" [ 608.969402] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.984598] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449402, 'name': CreateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.204505] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 609.205071] env[65503]: DEBUG nova.compute.manager [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 609.208095] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.311s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.209903] env[65503]: INFO nova.compute.claims [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 609.282036] env[65503]: INFO nova.compute.manager [-] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Took 1.65 seconds to deallocate network for instance. 
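The 'Acquiring/Releasing lock "compute_resources"' lines come from oslo.concurrency's lockutils, which logs how long each caller waited for and then held a named in-process lock. A minimal sketch of that pattern with illustrative names, not Nova's resource tracker:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Everything in here runs with the "compute_resources" lock held;
        # the DEBUG lines report how long callers waited and held it.
        return f"claimed {instance_uuid}"

    # The same lock can also be taken explicitly as a context manager.
    with lockutils.lock('compute_resources'):
        pass

    print(instance_claim('eec6a484-ab00-402e-a369-c3009065c553'))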
[ 609.303915] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.423310] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5263c6a2-5dbe-1609-4377-9f8e5ee59d72, 'name': SearchDatastore_Task, 'duration_secs': 0.016221} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.423666] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.423824] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 2dbc3860-c65c-4cbb-8d90-f1f74420e652/2dbc3860-c65c-4cbb-8d90-f1f74420e652.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 609.424112] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.424293] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 609.424644] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-addecd5e-2a2f-465e-ae19-91f8da25713f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.427111] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acd04856-4887-4c6c-a319-35b4dd7b9511 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.434233] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 609.434233] env[65503]: value = "task-4449403" [ 609.434233] env[65503]: _type = "Task" [ 609.434233] env[65503]: } to 
complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.438478] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 609.438658] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 609.440159] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d570e7e3-5af2-4651-aa49-4fb89935896a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.445556] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449403, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.449046] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for the task: (returnval){ [ 609.449046] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52662d77-37c3-71ac-6d90-30d8c43b5674" [ 609.449046] env[65503]: _type = "Task" [ 609.449046] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.454584] env[65503]: DEBUG nova.network.neutron [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Updated VIF entry in instance network info cache for port 6164c166-4054-4e4a-93fb-6e84abe74f7d. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 609.454909] env[65503]: DEBUG nova.network.neutron [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Updating instance_info_cache with network_info: [{"id": "6164c166-4054-4e4a-93fb-6e84abe74f7d", "address": "fa:16:3e:1e:c2:35", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6164c166-40", "ovs_interfaceid": "6164c166-4054-4e4a-93fb-6e84abe74f7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 609.459561] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52662d77-37c3-71ac-6d90-30d8c43b5674, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.485401] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449402, 'name': CreateVM_Task, 'duration_secs': 0.323811} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.485608] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 609.486017] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.486210] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.486526] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 609.487378] env[65503]: DEBUG nova.network.neutron [-] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 609.487621] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 609.488685] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef36f30e-a99b-413e-9bdb-16db97336fbc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.494600] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 609.494600] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52749bec-d6d1-c069-0cbe-e3df9ea93b39" [ 609.494600] env[65503]: _type = "Task" [ 609.494600] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.505390] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52749bec-d6d1-c069-0cbe-e3df9ea93b39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.596558] env[65503]: WARNING neutronclient.v2_0.client [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
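The instance_info_cache updates above dump full network_info entries: each VIF records its Neutron port id, MAC address, device name and the fixed IPs nested under network -> subnets -> ips. A small illustrative helper for pulling those fields out of one entry, with values trimmed from the port 6164c166-... record above:

    def summarise_vif(vif):
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        return {'port_id': vif['id'],
                'mac': vif['address'],
                'devname': vif.get('devname'),
                'fixed_ips': ips}

    vif = {   # trimmed from the cache entry logged above
        'id': '6164c166-4054-4e4a-93fb-6e84abe74f7d',
        'address': 'fa:16:3e:1e:c2:35',
        'devname': 'tap6164c166-40',
        'network': {'subnets': [{'ips': [{'address': '192.168.128.4'}]}]},
    }
    print(summarise_vif(vif))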
[ 609.597503] env[65503]: WARNING openstack [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 609.597664] env[65503]: WARNING openstack [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 609.724558] env[65503]: DEBUG nova.compute.utils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 609.726836] env[65503]: DEBUG nova.compute.manager [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 609.727140] env[65503]: DEBUG nova.network.neutron [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 609.727568] env[65503]: WARNING neutronclient.v2_0.client [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 609.728103] env[65503]: WARNING neutronclient.v2_0.client [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
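The neutronclient warnings repeated through this run point at openstacksdk as the replacement binding. A hedged sketch of the equivalent port query through the SDK; the cloud name is a placeholder for an entry in clouds.yaml, and the device_id is the instance above that is allocating networks:

    import openstack

    conn = openstack.connect(cloud='devstack-admin')
    # List the Neutron ports bound to the instance being built.
    for port in conn.network.ports(device_id='db942a2d-671b-4036-a80b-d2375145cd29'):
        print(port.id, port.fixed_ips)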
[ 609.729135] env[65503]: WARNING openstack [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 609.729135] env[65503]: WARNING openstack [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 609.744074] env[65503]: DEBUG nova.network.neutron [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Updating instance_info_cache with network_info: [{"id": "cde44ebc-2cc2-4df5-9886-d3766e7f4bb9", "address": "fa:16:3e:1f:04:29", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcde44ebc-2c", "ovs_interfaceid": "cde44ebc-2cc2-4df5-9886-d3766e7f4bb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 609.778981] env[65503]: DEBUG nova.policy [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d0c83aa7c5c49fea5510c83bea99a1d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72a3774600024de1b1347117fd020278', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 609.790907] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.890155] env[65503]: DEBUG nova.network.neutron [-] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 609.949323] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449403, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.961529] env[65503]: DEBUG oslo_concurrency.lockutils [req-49204196-d685-4724-bb12-3740083473cb req-a0521f18-100e-4624-80e2-6fc4e0d33060 service nova] Releasing lock "refresh_cache-ad85eef0-cef7-4900-b193-1737a6c2f17b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.961780] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52662d77-37c3-71ac-6d90-30d8c43b5674, 'name': SearchDatastore_Task, 'duration_secs': 0.021271} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.962691] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be3786be-db3a-4713-a5e1-5115507730a3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.970514] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for the task: (returnval){ [ 609.970514] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ea623b-b3a8-a163-fdb2-a274ded04645" [ 609.970514] env[65503]: _type = "Task" [ 609.970514] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.985065] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ea623b-b3a8-a163-fdb2-a274ded04645, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.993018] env[65503]: DEBUG nova.network.neutron [-] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 610.012498] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52749bec-d6d1-c069-0cbe-e3df9ea93b39, 'name': SearchDatastore_Task, 'duration_secs': 0.013734} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.012642] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.013084] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 610.013378] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.013494] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.013764] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 610.014791] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-660a8a79-e8c5-405e-bf90-a392cd66be4a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.030511] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 610.030797] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 610.032279] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e2237ce-f550-4b6a-b13b-56f7f99eb8e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.040551] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 610.040551] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524fcb36-723c-4254-86a8-ad018ee8106b" [ 610.040551] env[65503]: _type = "Task" [ 610.040551] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.054430] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524fcb36-723c-4254-86a8-ad018ee8106b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.186468] env[65503]: DEBUG nova.compute.manager [req-da1d658f-5962-4784-b5c5-4bcbc479b093 req-0e52bcb3-31e8-43b2-afa5-72bca6b252ca service nova] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Received event network-vif-plugged-cde44ebc-2cc2-4df5-9886-d3766e7f4bb9 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 610.188040] env[65503]: DEBUG oslo_concurrency.lockutils [req-da1d658f-5962-4784-b5c5-4bcbc479b093 req-0e52bcb3-31e8-43b2-afa5-72bca6b252ca service nova] Acquiring lock "1e92795e-cf30-4175-9e31-c29278f3e9e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.189039] env[65503]: DEBUG oslo_concurrency.lockutils [req-da1d658f-5962-4784-b5c5-4bcbc479b093 req-0e52bcb3-31e8-43b2-afa5-72bca6b252ca service nova] Lock "1e92795e-cf30-4175-9e31-c29278f3e9e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.189039] env[65503]: DEBUG oslo_concurrency.lockutils [req-da1d658f-5962-4784-b5c5-4bcbc479b093 req-0e52bcb3-31e8-43b2-afa5-72bca6b252ca service nova] Lock "1e92795e-cf30-4175-9e31-c29278f3e9e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.189279] env[65503]: DEBUG nova.compute.manager [req-da1d658f-5962-4784-b5c5-4bcbc479b093 req-0e52bcb3-31e8-43b2-afa5-72bca6b252ca service nova] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] No waiting events found dispatching network-vif-plugged-cde44ebc-2cc2-4df5-9886-d3766e7f4bb9 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 610.189279] env[65503]: WARNING nova.compute.manager [req-da1d658f-5962-4784-b5c5-4bcbc479b093 req-0e52bcb3-31e8-43b2-afa5-72bca6b252ca service nova] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Received unexpected event network-vif-plugged-cde44ebc-2cc2-4df5-9886-d3766e7f4bb9 for 
instance with vm_state building and task_state spawning. [ 610.227834] env[65503]: DEBUG nova.compute.manager [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 610.245391] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "refresh_cache-1e92795e-cf30-4175-9e31-c29278f3e9e0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.245913] env[65503]: DEBUG nova.compute.manager [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Instance network_info: |[{"id": "cde44ebc-2cc2-4df5-9886-d3766e7f4bb9", "address": "fa:16:3e:1f:04:29", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcde44ebc-2c", "ovs_interfaceid": "cde44ebc-2cc2-4df5-9886-d3766e7f4bb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 610.247648] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:04:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaba65c3-6925-4c7f-83b6-17cd1a328e27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cde44ebc-2cc2-4df5-9886-d3766e7f4bb9', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 610.267628] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 610.271339] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 610.273087] env[65503]: DEBUG nova.network.neutron [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Successfully created port: 74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 610.275932] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ee2b1c4-7271-4062-8329-89585c68aa51 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.305636] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 610.305636] env[65503]: value = "task-4449404" [ 610.305636] env[65503]: _type = "Task" [ 610.305636] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.319983] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449404, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.394346] env[65503]: INFO nova.compute.manager [-] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Took 2.19 seconds to deallocate network for instance. [ 610.449412] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.907743} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.450208] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 2dbc3860-c65c-4cbb-8d90-f1f74420e652/2dbc3860-c65c-4cbb-8d90-f1f74420e652.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 610.450481] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 610.450703] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-881f3d53-d55d-40cd-bcce-555b7b65d79f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.459729] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 610.459729] env[65503]: value = "task-4449405" [ 610.459729] env[65503]: _type = "Task" [ 610.459729] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.472675] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449405, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.484943] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ea623b-b3a8-a163-fdb2-a274ded04645, 'name': SearchDatastore_Task, 'duration_secs': 0.066542} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.488670] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.488981] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d/f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 610.489506] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aaf4ba95-798a-41e7-891e-2e548fcaa54a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.495508] env[65503]: INFO nova.compute.manager [-] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Took 1.63 seconds to deallocate network for instance. [ 610.499042] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for the task: (returnval){ [ 610.499042] env[65503]: value = "task-4449406" [ 610.499042] env[65503]: _type = "Task" [ 610.499042] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.519793] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449406, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.563808] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524fcb36-723c-4254-86a8-ad018ee8106b, 'name': SearchDatastore_Task, 'duration_secs': 0.028254} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.565100] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd9b1ecd-a763-41df-a523-d72c9bc3dd00 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.572813] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 610.572813] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5213374d-9821-ecfe-15a7-3d712c5d258d" [ 610.572813] env[65503]: _type = "Task" [ 610.572813] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.600795] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5213374d-9821-ecfe-15a7-3d712c5d258d, 'name': SearchDatastore_Task, 'duration_secs': 0.015564} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.604103] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.604572] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 610.606382] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bca0bae-49f6-4f54-a8d2-0f3bad3ba6a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.611264] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a1060dd-de24-4ab6-a341-1499d4c6ca07 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.626161] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebebc21-5e72-4c67-bb61-711ccc1f7af9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.635379] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 610.635379] env[65503]: value = "task-4449407" [ 610.635379] env[65503]: _type = "Task" [ 610.635379] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.692258] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0741f9-cfda-4fd4-b570-d0a0a2068e3e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.697833] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449407, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.704212] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3626743d-44d6-4ac4-bb80-9e4b30674291 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.720945] env[65503]: DEBUG nova.compute.provider_tree [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 610.819227] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449404, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.905370] env[65503]: DEBUG oslo_concurrency.lockutils [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.972845] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449405, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184729} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.973199] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 610.974149] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613c1188-1bc2-4a89-b286-4369d7e4a7d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.002476] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 2dbc3860-c65c-4cbb-8d90-f1f74420e652/2dbc3860-c65c-4cbb-8d90-f1f74420e652.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 611.002892] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3f5a0bd-748f-439b-9ed9-5b0923fa8e32 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.021046] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.033351] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449406, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.035237] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 611.035237] env[65503]: value = "task-4449408" [ 611.035237] env[65503]: _type = "Task" [ 611.035237] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.150100] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449407, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.154425] env[65503]: DEBUG nova.compute.manager [req-8a3eb55a-b577-49b6-bfa6-6ad3cb6ef603 req-0eec9774-c0b9-483e-a084-93591b615296 service nova] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Received event network-vif-deleted-2afefd06-f1b1-4227-b8ea-28c0ccdf5b69 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 611.227915] env[65503]: DEBUG nova.scheduler.client.report [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 611.241629] env[65503]: DEBUG nova.compute.manager [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 611.280413] env[65503]: DEBUG nova.virt.hardware [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 611.280881] env[65503]: DEBUG nova.virt.hardware [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 611.280971] env[65503]: DEBUG nova.virt.hardware [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 611.281470] env[65503]: DEBUG nova.virt.hardware [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 611.281851] env[65503]: DEBUG nova.virt.hardware [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 611.282135] env[65503]: DEBUG nova.virt.hardware [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 611.282629] env[65503]: DEBUG nova.virt.hardware [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 611.282825] env[65503]: DEBUG nova.virt.hardware [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 611.283079] env[65503]: DEBUG nova.virt.hardware [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 611.283351] env[65503]: DEBUG nova.virt.hardware [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 611.283949] env[65503]: DEBUG nova.virt.hardware [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 611.284670] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacf64b5-810f-4876-a5ff-24141b974585 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.299683] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249c4c6e-6e0d-42fc-8609-0efbf369673a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.330475] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449404, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.533904] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449406, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.867469} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.534361] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d/f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 611.534590] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 611.534875] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-afaa4eb9-027d-4e29-b991-63a1ab3fb2a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.550040] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449408, 'name': ReconfigVM_Task, 'duration_secs': 0.47826} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.552260] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 2dbc3860-c65c-4cbb-8d90-f1f74420e652/2dbc3860-c65c-4cbb-8d90-f1f74420e652.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 611.552260] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for the task: (returnval){ [ 611.552260] env[65503]: value = "task-4449409" [ 611.552260] env[65503]: _type = "Task" [ 611.552260] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.553032] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aba9b980-6ac4-436e-bd71-437192f36e34 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.567294] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449409, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.569528] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 611.569528] env[65503]: value = "task-4449410" [ 611.569528] env[65503]: _type = "Task" [ 611.569528] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.588449] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449410, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.650134] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449407, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.002577} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.650577] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 611.650697] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 611.650973] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6a19211-8131-488e-9149-1f53577b27ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.661044] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 611.661044] env[65503]: value = "task-4449411" [ 611.661044] env[65503]: _type = "Task" [ 611.661044] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.670167] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449411, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.737400] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.529s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.739715] env[65503]: DEBUG nova.compute.manager [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 611.742258] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.069s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.744890] env[65503]: INFO nova.compute.claims [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 611.823591] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449404, 'name': CreateVM_Task, 'duration_secs': 1.238842} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.823591] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 611.823591] env[65503]: WARNING neutronclient.v2_0.client [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
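The "Acquiring lock", "acquired ... waited", and '"released" ... held' messages above come from oslo.concurrency's lockutils wrapper around named in-process locks such as "compute_resources" (the lockutils.py line numbers in the records point at that wrapper). A minimal sketch of the pattern, assuming the decorator form; the function name and arguments are illustrative, not Nova's actual resource-tracker code:

```python
# Minimal sketch (assuming oslo.concurrency's decorator form; not Nova's actual
# resource-tracker code) of the pattern behind the lockutils messages above:
# a named in-process lock serializes the critical section, and the library's
# wrapper logs how long the caller waited for the lock and how long it was held.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_usage_sketch(tracked_usage, instance_uuid, usage):
    # Runs with the "compute_resources" lock held: 'waited N.NNNs' in the log is
    # the time spent blocked before entering, 'held N.NNNs' the time spent inside.
    tracked_usage[instance_uuid] = usage
```

Nova reaches this through its own locking helpers, but the accounting seen at 611.737400 and 611.742258 (held 2.529s, waited 11.069s) is produced by a wrapper of this shape.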
[ 611.824059] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.824059] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.825046] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 611.826161] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69815708-f23c-467f-83ac-32aa3d318844 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.832572] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 611.832572] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5204f08c-fd59-e71f-24fd-6ef0ccfa3a35" [ 611.832572] env[65503]: _type = "Task" [ 611.832572] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.842266] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5204f08c-fd59-e71f-24fd-6ef0ccfa3a35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.055497] env[65503]: DEBUG nova.network.neutron [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Successfully updated port: 74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 612.071988] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449409, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088454} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.071988] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.071988] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1105d2b1-0403-4d00-a402-efe3ade0ce8f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.099456] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449410, 'name': Rename_Task, 'duration_secs': 0.168125} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.108256] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d/f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 612.108686] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 612.109675] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-892d5d9f-a3d0-4f06-8be7-f03ec3c9b2c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.127212] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f0c87ab-11e8-49cf-b956-bc5491b7382c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.136362] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 612.136362] env[65503]: value = "task-4449412" [ 612.136362] env[65503]: _type = "Task" [ 612.136362] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.137992] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for the task: (returnval){ [ 612.137992] env[65503]: value = "task-4449413" [ 612.137992] env[65503]: _type = "Task" [ 612.137992] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.154527] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449412, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.154969] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449413, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.170318] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449411, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082514} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.170888] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.172212] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfe850e-cb72-4036-ab3f-5bae1f34d24a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.194290] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 612.195433] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4687b939-89bd-49ae-bffb-2fa99daa6ddb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.218620] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 612.218620] env[65503]: value = "task-4449414" [ 612.218620] env[65503]: _type = "Task" [ 612.218620] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.230839] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449414, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.251023] env[65503]: DEBUG nova.compute.utils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 612.253860] env[65503]: DEBUG nova.compute.manager [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 612.254269] env[65503]: DEBUG nova.network.neutron [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 612.254681] env[65503]: WARNING neutronclient.v2_0.client [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 612.255120] env[65503]: WARNING neutronclient.v2_0.client [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 612.255795] env[65503]: WARNING openstack [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 612.256293] env[65503]: WARNING openstack [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 612.357784] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5204f08c-fd59-e71f-24fd-6ef0ccfa3a35, 'name': SearchDatastore_Task, 'duration_secs': 0.030164} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.357784] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 612.357784] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 612.358032] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.358336] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.358680] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 612.361038] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f533bdd-e4f6-4a42-9851-4a2eaa09d98d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.375609] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 612.377330] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 612.380594] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84e5e386-df12-445e-8d57-e709a7e598dd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.392904] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "9297d849-a966-48da-ba6a-453c42b99e44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.392904] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "9297d849-a966-48da-ba6a-453c42b99e44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.393128] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 612.393128] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52692644-569d-71b6-0dae-76832c2932bb" [ 612.393128] env[65503]: _type = "Task" [ 612.393128] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.403435] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52692644-569d-71b6-0dae-76832c2932bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.558671] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquiring lock "refresh_cache-db942a2d-671b-4036-a80b-d2375145cd29" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.559018] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquired lock "refresh_cache-db942a2d-671b-4036-a80b-d2375145cd29" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.559401] env[65503]: DEBUG nova.network.neutron [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 612.651710] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449412, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.655607] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449413, 'name': ReconfigVM_Task, 'duration_secs': 0.437119} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.655909] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Reconfigured VM instance instance-00000006 to attach disk [datastore1] f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d/f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 612.656631] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-050bb99e-1de5-4813-885f-03f2c76bee12 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.664419] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for the task: (returnval){ [ 612.664419] env[65503]: value = "task-4449415" [ 612.664419] env[65503]: _type = "Task" [ 612.664419] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.674208] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449415, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.733855] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449414, 'name': ReconfigVM_Task, 'duration_secs': 0.398404} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.733855] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 612.733855] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1366e1ac-588d-4716-a40a-8e72ec6707aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.741307] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 612.741307] env[65503]: value = "task-4449416" [ 612.741307] env[65503]: _type = "Task" [ 612.741307] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.751394] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449416, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.754857] env[65503]: DEBUG nova.policy [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4d889f5b4554fdf887ad0e27a7370aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6797718310754721b81c8d30acfdacd5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 612.756991] env[65503]: DEBUG nova.compute.manager [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 612.904950] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52692644-569d-71b6-0dae-76832c2932bb, 'name': SearchDatastore_Task, 'duration_secs': 0.022555} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.909834] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21b832a3-e24c-4442-9db5-7cd509350c65 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.920847] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 612.920847] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527c8524-6906-d357-af68-2d6ecc0bf81d" [ 612.920847] env[65503]: _type = "Task" [ 612.920847] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.934316] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527c8524-6906-d357-af68-2d6ecc0bf81d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.064146] env[65503]: WARNING openstack [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 613.064677] env[65503]: WARNING openstack [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 613.088521] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44bc7533-04f2-4d0e-9e23-440548cf2522 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.099092] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acee8bee-1807-42eb-9e5b-25bc22142112 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.135440] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ad9015-2b67-4d69-8b21-c893fba79f28 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
613.151471] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a9f04b-e935-4836-a046-8f0961cacec2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.155620] env[65503]: DEBUG oslo_vmware.api [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449412, 'name': PowerOnVM_Task, 'duration_secs': 0.563674} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.155960] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 613.156180] env[65503]: INFO nova.compute.manager [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Took 13.18 seconds to spawn the instance on the hypervisor. [ 613.156391] env[65503]: DEBUG nova.compute.manager [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 613.157609] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded25af5-752a-4b94-99f6-b0a47d54b3fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.169157] env[65503]: DEBUG nova.compute.provider_tree [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 613.184452] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449415, 'name': Rename_Task, 'duration_secs': 0.259554} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.184709] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 613.184944] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d206fac-4b74-440b-8246-faabe4081193 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.193161] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for the task: (returnval){ [ 613.193161] env[65503]: value = "task-4449417" [ 613.193161] env[65503]: _type = "Task" [ 613.193161] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.203251] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449417, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.253404] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449416, 'name': Rename_Task, 'duration_secs': 0.231764} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.255047] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 613.255047] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80f72a16-4a13-4f1c-9be2-e0c3af74a812 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.267223] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 613.267223] env[65503]: value = "task-4449418" [ 613.267223] env[65503]: _type = "Task" [ 613.267223] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.279292] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449418, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.436304] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527c8524-6906-d357-af68-2d6ecc0bf81d, 'name': SearchDatastore_Task, 'duration_secs': 0.015308} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.436304] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.436304] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 1e92795e-cf30-4175-9e31-c29278f3e9e0/1e92795e-cf30-4175-9e31-c29278f3e9e0.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 613.436304] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7bacfd6c-b057-44a2-9b0e-d87d746fcf21 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.450670] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 613.450670] env[65503]: value = "task-4449419" [ 613.450670] env[65503]: _type = "Task" [ 613.450670] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.457904] env[65503]: DEBUG nova.network.neutron [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Successfully created port: 984474f0-e03b-413b-8c1f-8e553672a7df {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 613.490231] env[65503]: DEBUG nova.network.neutron [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 613.677772] env[65503]: DEBUG nova.scheduler.client.report [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 613.691311] env[65503]: INFO nova.compute.manager [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Took 24.55 seconds to build instance. [ 613.713419] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449417, 'name': PowerOnVM_Task} progress is 1%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.769045] env[65503]: DEBUG nova.compute.manager [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 613.788342] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquiring lock "85d0ed1d-6306-4999-832b-f4e69233fec7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.788604] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "85d0ed1d-6306-4999-832b-f4e69233fec7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.789589] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449418, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.800671] env[65503]: DEBUG nova.virt.hardware [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 613.800965] env[65503]: DEBUG nova.virt.hardware [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 613.801809] env[65503]: DEBUG nova.virt.hardware [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 613.802100] env[65503]: DEBUG nova.virt.hardware [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 613.802234] env[65503]: DEBUG nova.virt.hardware [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 613.802387] env[65503]: DEBUG nova.virt.hardware [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 613.802619] env[65503]: DEBUG nova.virt.hardware [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 613.802790] env[65503]: DEBUG nova.virt.hardware [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 613.802993] env[65503]: DEBUG nova.virt.hardware [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 613.803254] env[65503]: DEBUG nova.virt.hardware [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 613.803462] env[65503]: DEBUG nova.virt.hardware [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 613.804431] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2754d41-a707-4b9e-a861-5f810470b2c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.815582] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f019216-3e10-42e0-940c-aa6822a6a7aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.964201] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449419, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.189783] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.190538] env[65503]: DEBUG nova.compute.manager [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 614.193692] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.321s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.195280] env[65503]: INFO nova.compute.claims [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 614.198152] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04c5aa1e-76d2-4eec-9114-0b25e59d9c81 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.076s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.209149] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449417, 'name': PowerOnVM_Task} progress is 64%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.224085] env[65503]: WARNING openstack [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 614.224745] env[65503]: WARNING openstack [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 614.283834] env[65503]: DEBUG oslo_vmware.api [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449418, 'name': PowerOnVM_Task, 'duration_secs': 0.57002} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.284176] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 614.284387] env[65503]: INFO nova.compute.manager [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Took 5.48 seconds to spawn the instance on the hypervisor. [ 614.284561] env[65503]: DEBUG nova.compute.manager [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 614.285432] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a064ac-674e-4fd5-a208-6241aed59b49 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.462861] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449419, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742908} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.463371] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 1e92795e-cf30-4175-9e31-c29278f3e9e0/1e92795e-cf30-4175-9e31-c29278f3e9e0.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 614.463854] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 614.464114] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11eda807-fff6-43eb-b2ac-9b909e74f464 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.473244] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 614.473244] env[65503]: value = "task-4449420" [ 614.473244] env[65503]: _type = "Task" [ 614.473244] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.486841] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449420, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.700727] env[65503]: DEBUG nova.compute.utils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 614.709694] env[65503]: DEBUG nova.compute.manager [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 614.710237] env[65503]: DEBUG nova.network.neutron [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 614.711094] env[65503]: WARNING neutronclient.v2_0.client [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 614.711094] env[65503]: WARNING neutronclient.v2_0.client [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 614.711941] env[65503]: WARNING openstack [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 614.713446] env[65503]: WARNING openstack [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 614.727062] env[65503]: DEBUG nova.compute.manager [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 614.755400] env[65503]: DEBUG oslo_vmware.api [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4449417, 'name': PowerOnVM_Task, 'duration_secs': 1.232465} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.756496] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 614.756496] env[65503]: INFO nova.compute.manager [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Took 17.17 seconds to spawn the instance on the hypervisor. [ 614.756496] env[65503]: DEBUG nova.compute.manager [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 614.757637] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170b96b2-9183-40fd-8254-cd2af4b64d3d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.815697] env[65503]: INFO nova.compute.manager [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Took 23.07 seconds to build instance. [ 614.989705] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449420, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085955} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.990343] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 614.991363] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724dbed5-b0c9-47d7-a4ba-83c9f7fa2c6b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.022026] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 1e92795e-cf30-4175-9e31-c29278f3e9e0/1e92795e-cf30-4175-9e31-c29278f3e9e0.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 615.022026] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c83c5e90-9fb6-4f13-a205-f38c6fc8e114 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.044793] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 615.044793] env[65503]: value = "task-4449421" [ 615.044793] env[65503]: _type = "Task" [ 615.044793] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.055295] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449421, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.172609] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527a06ce-88ec-4322-afc0-0c4ab400b6e3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.181717] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1925ce98-eac3-44e2-9120-74b69b34c0b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.218204] env[65503]: DEBUG nova.compute.manager [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 615.222925] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd693f6b-9546-4ec2-acc8-e9498c6a5f63 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.233068] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b96011d-eec3-449b-9c8d-02e5d2e43251 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.254174] env[65503]: DEBUG nova.compute.provider_tree [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.265263] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.279937] env[65503]: INFO nova.compute.manager [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Took 27.09 seconds to build instance. [ 615.322061] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf470204-cdaf-41a6-9f75-4be1b4541a82 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lock "50f11559-b8c7-41a2-aa43-255a28ffa58c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.588s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.429158] env[65503]: DEBUG nova.network.neutron [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Successfully updated port: 984474f0-e03b-413b-8c1f-8e553672a7df {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 615.529701] env[65503]: DEBUG nova.policy [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9011a8ab0f2446caab63f53d9611ce0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a5db0fe1dba4a49a57fac8ebceab968', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 615.556727] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449421, 
'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.759396] env[65503]: DEBUG nova.scheduler.client.report [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 615.782379] env[65503]: DEBUG oslo_concurrency.lockutils [None req-110c69bd-2f37-4e53-91dc-e26e76ed9a1e tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.604s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.825450] env[65503]: DEBUG nova.compute.manager [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 615.880032] env[65503]: WARNING neutronclient.v2_0.client [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 615.880753] env[65503]: WARNING openstack [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 615.881138] env[65503]: WARNING openstack [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 615.932237] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "refresh_cache-eec6a484-ab00-402e-a369-c3009065c553" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.932529] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquired lock "refresh_cache-eec6a484-ab00-402e-a369-c3009065c553" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.932741] env[65503]: DEBUG nova.network.neutron [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 616.058471] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449421, 'name': ReconfigVM_Task, 'duration_secs': 0.914067} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.058757] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 1e92795e-cf30-4175-9e31-c29278f3e9e0/1e92795e-cf30-4175-9e31-c29278f3e9e0.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 616.060193] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-22497bac-cecf-41c9-96a1-214616c81065 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.068162] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 616.068162] env[65503]: value = "task-4449422" [ 616.068162] env[65503]: _type = "Task" [ 616.068162] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.080885] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449422, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.237531] env[65503]: DEBUG nova.compute.manager [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 616.254265] env[65503]: DEBUG nova.network.neutron [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Successfully created port: 3985c7bb-5579-4aeb-9dce-54e2716f7d60 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 616.270405] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.076s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.271481] env[65503]: DEBUG nova.compute.manager [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 616.277423] env[65503]: DEBUG nova.virt.hardware [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 616.277743] env[65503]: DEBUG nova.virt.hardware [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 616.277873] env[65503]: DEBUG nova.virt.hardware [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 616.278151] env[65503]: DEBUG nova.virt.hardware [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 616.279027] env[65503]: DEBUG nova.virt.hardware [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 616.279027] env[65503]: DEBUG nova.virt.hardware [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 616.279144] env[65503]: DEBUG nova.virt.hardware [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 616.280109] env[65503]: DEBUG nova.virt.hardware [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 616.280109] env[65503]: DEBUG 
nova.virt.hardware [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 616.280109] env[65503]: DEBUG nova.virt.hardware [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 616.280109] env[65503]: DEBUG nova.virt.hardware [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 616.280403] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.790s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.282153] env[65503]: INFO nova.compute.claims [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.290092] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e451473-99f1-4181-9c6f-95da9183f25e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.308221] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92e52ef-f382-467a-a66b-007502585631 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.364662] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.437106] env[65503]: WARNING openstack [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 616.437758] env[65503]: WARNING openstack [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option 
valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 616.537290] env[65503]: DEBUG nova.network.neutron [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Updating instance_info_cache with network_info: [{"id": "74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059", "address": "fa:16:3e:26:d2:17", "network": {"id": "d504c3f2-514b-4ccc-a7e0-1813e7cfece6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1527786722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72a3774600024de1b1347117fd020278", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74b1a7c2-cf", "ovs_interfaceid": "74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 616.580832] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449422, 'name': Rename_Task, 'duration_secs': 0.321159} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.581232] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 616.581511] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23cd48ef-edc5-413e-b6eb-3a0eaea395c1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.590784] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 616.590784] env[65503]: value = "task-4449423" [ 616.590784] env[65503]: _type = "Task" [ 616.590784] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.596499] env[65503]: DEBUG nova.network.neutron [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 616.609027] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449423, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.800031] env[65503]: DEBUG nova.compute.utils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 616.802219] env[65503]: DEBUG nova.compute.manager [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 616.802744] env[65503]: DEBUG nova.network.neutron [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 616.804091] env[65503]: WARNING neutronclient.v2_0.client [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 616.804597] env[65503]: WARNING neutronclient.v2_0.client [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 616.805786] env[65503]: WARNING openstack [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 616.807865] env[65503]: WARNING openstack [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 617.026036] env[65503]: WARNING openstack [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 617.026036] env[65503]: WARNING openstack [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 617.041598] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Releasing lock "refresh_cache-db942a2d-671b-4036-a80b-d2375145cd29" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.041999] env[65503]: DEBUG nova.compute.manager [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Instance network_info: |[{"id": "74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059", "address": "fa:16:3e:26:d2:17", "network": {"id": "d504c3f2-514b-4ccc-a7e0-1813e7cfece6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1527786722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72a3774600024de1b1347117fd020278", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74b1a7c2-cf", "ovs_interfaceid": "74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 617.042539] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:d2:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.054119] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Creating folder: Project (72a3774600024de1b1347117fd020278). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 617.054766] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b540a36-5786-4f71-ad06-a6a1afd5bb26 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.072304] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Created folder: Project (72a3774600024de1b1347117fd020278) in parent group-v870190. [ 617.072304] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Creating folder: Instances. Parent ref: group-v870217. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 617.072580] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-807d787b-1168-4b3c-b846-29dac36364c7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.088332] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Created folder: Instances in parent group-v870217. [ 617.089099] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 617.089099] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 617.089458] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ababf37a-da19-445b-ab7f-b62360bb25d3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.115609] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449423, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.116744] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.116744] env[65503]: value = "task-4449426" [ 617.116744] env[65503]: _type = "Task" [ 617.116744] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.127976] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449426, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.216685] env[65503]: DEBUG nova.policy [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2dd4af149654802843fcf90db0dbe72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f55d9e8eeb34665aaa465057871d687', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 617.322151] env[65503]: DEBUG nova.compute.manager [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 617.463867] env[65503]: WARNING neutronclient.v2_0.client [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 617.464751] env[65503]: WARNING openstack [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 617.465217] env[65503]: WARNING openstack [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 617.615496] env[65503]: DEBUG oslo_vmware.api [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449423, 'name': PowerOnVM_Task, 'duration_secs': 0.793936} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.615800] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 617.615968] env[65503]: INFO nova.compute.manager [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Took 11.38 seconds to spawn the instance on the hypervisor. [ 617.616639] env[65503]: DEBUG nova.compute.manager [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 617.620527] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5949dd56-ceec-4cb9-90e3-e1bd332c4f2a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.638472] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449426, 'name': CreateVM_Task, 'duration_secs': 0.466378} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.640432] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 617.650479] env[65503]: WARNING neutronclient.v2_0.client [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 617.650852] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.651051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.651434] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 617.652537] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2c6281a-bf29-4e61-ab8e-f9609c7676c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.664331] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for the task: (returnval){ [ 617.664331] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528a62fe-d65a-cd32-4bcc-7a387591b22a" [ 617.664331] env[65503]: _type = "Task" [ 617.664331] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.680715] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528a62fe-d65a-cd32-4bcc-7a387591b22a, 'name': SearchDatastore_Task, 'duration_secs': 0.0117} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.682901] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.682901] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 617.683131] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.683274] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.683493] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 617.684037] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-053f310a-7c95-45ab-8a58-7f53c282a612 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.694247] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 617.694433] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 617.695917] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-092e03f5-6db2-4186-a829-583ef1fe23b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.700751] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d18aa3-ccf4-4b66-83e1-935cbb2f3790 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.708994] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for the task: (returnval){ [ 617.708994] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525ac72f-aa56-9de2-74e9-a721cebaa2dc" [ 617.708994] env[65503]: _type = "Task" [ 617.708994] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.717888] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcaf3720-84d3-41fd-8ac9-db0a938e11cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.730228] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525ac72f-aa56-9de2-74e9-a721cebaa2dc, 'name': SearchDatastore_Task, 'duration_secs': 0.01002} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.758894] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67ac600b-200d-42fa-943c-fe9f21e419b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.761975] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a993affd-21c3-4f3c-a702-66a1f8459ac8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.769283] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for the task: (returnval){ [ 617.769283] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527ccd55-2805-1c3e-3441-013c1fe0fcdd" [ 617.769283] env[65503]: _type = "Task" [ 617.769283] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.775894] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d1b8b4-ccfe-4ca7-b948-0788c5dac5a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.788289] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527ccd55-2805-1c3e-3441-013c1fe0fcdd, 'name': SearchDatastore_Task, 'duration_secs': 0.010363} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.795093] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.795407] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] db942a2d-671b-4036-a80b-d2375145cd29/db942a2d-671b-4036-a80b-d2375145cd29.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 617.796248] env[65503]: DEBUG nova.compute.provider_tree [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.797858] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4f28ee2-930d-4ec4-b557-e237b599c830 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.806350] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for the task: (returnval){ [ 617.806350] env[65503]: value = "task-4449427" [ 617.806350] env[65503]: _type = "Task" [ 617.806350] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.812768] env[65503]: DEBUG nova.network.neutron [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Successfully created port: 939876df-2b3d-4723-8926-1187feb4fe37 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 617.819172] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449427, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.137998] env[65503]: DEBUG nova.network.neutron [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Successfully updated port: 3985c7bb-5579-4aeb-9dce-54e2716f7d60 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 618.163204] env[65503]: INFO nova.compute.manager [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Took 27.72 seconds to build instance. [ 618.228074] env[65503]: DEBUG nova.network.neutron [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Updating instance_info_cache with network_info: [{"id": "984474f0-e03b-413b-8c1f-8e553672a7df", "address": "fa:16:3e:d7:64:bb", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap984474f0-e0", "ovs_interfaceid": "984474f0-e03b-413b-8c1f-8e553672a7df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 618.302902] env[65503]: DEBUG nova.scheduler.client.report [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 
1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 618.325458] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449427, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511914} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.326434] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] db942a2d-671b-4036-a80b-d2375145cd29/db942a2d-671b-4036-a80b-d2375145cd29.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 618.326663] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 618.326922] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4a57060-5105-4805-8ec6-6330871e4d18 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.335419] env[65503]: DEBUG nova.compute.manager [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 618.339926] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for the task: (returnval){ [ 618.339926] env[65503]: value = "task-4449428" [ 618.339926] env[65503]: _type = "Task" [ 618.339926] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.351215] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449428, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.365317] env[65503]: DEBUG nova.virt.hardware [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 618.365577] env[65503]: DEBUG nova.virt.hardware [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 618.365784] env[65503]: DEBUG nova.virt.hardware [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 618.366077] env[65503]: DEBUG nova.virt.hardware [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 618.366241] env[65503]: DEBUG nova.virt.hardware [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 618.366395] env[65503]: DEBUG nova.virt.hardware [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 618.366581] env[65503]: DEBUG nova.virt.hardware [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.366729] env[65503]: DEBUG nova.virt.hardware [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 618.366883] env[65503]: DEBUG nova.virt.hardware [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 
tempest-ServersTestJSON-1310463075-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 618.367044] env[65503]: DEBUG nova.virt.hardware [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 618.367216] env[65503]: DEBUG nova.virt.hardware [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 618.368748] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235fa69f-7c74-4154-8deb-907f62eeb935 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.378085] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a86a110-fe2b-4626-97fa-31aa80a5360d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.462746] env[65503]: DEBUG nova.compute.manager [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Received event network-changed-cde44ebc-2cc2-4df5-9886-d3766e7f4bb9 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 618.462948] env[65503]: DEBUG nova.compute.manager [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Refreshing instance network info cache due to event network-changed-cde44ebc-2cc2-4df5-9886-d3766e7f4bb9. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 618.463224] env[65503]: DEBUG oslo_concurrency.lockutils [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Acquiring lock "refresh_cache-1e92795e-cf30-4175-9e31-c29278f3e9e0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.463368] env[65503]: DEBUG oslo_concurrency.lockutils [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Acquired lock "refresh_cache-1e92795e-cf30-4175-9e31-c29278f3e9e0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.463517] env[65503]: DEBUG nova.network.neutron [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Refreshing network info cache for port cde44ebc-2cc2-4df5-9886-d3766e7f4bb9 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 618.641627] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquiring lock "refresh_cache-5e2cf383-312b-404f-acff-2ecb75678600" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.641627] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquired lock "refresh_cache-5e2cf383-312b-404f-acff-2ecb75678600" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.641627] env[65503]: DEBUG nova.network.neutron [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 618.666437] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9778f686-bd49-4e41-a270-6a3e225be441 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "1e92795e-cf30-4175-9e31-c29278f3e9e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.229s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.734713] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Releasing lock "refresh_cache-eec6a484-ab00-402e-a369-c3009065c553" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.735124] env[65503]: DEBUG nova.compute.manager [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Instance network_info: |[{"id": "984474f0-e03b-413b-8c1f-8e553672a7df", "address": "fa:16:3e:d7:64:bb", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": 
"shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap984474f0-e0", "ovs_interfaceid": "984474f0-e03b-413b-8c1f-8e553672a7df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 618.735636] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:64:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '984474f0-e03b-413b-8c1f-8e553672a7df', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 618.744468] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Creating folder: Project (6797718310754721b81c8d30acfdacd5). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 618.744468] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8598f14-364e-485b-bc6d-fcf9e3fed8b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.757303] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Created folder: Project (6797718310754721b81c8d30acfdacd5) in parent group-v870190. [ 618.757303] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Creating folder: Instances. Parent ref: group-v870220. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 618.757303] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6507ab19-b6b2-474c-ae72-d157134854d1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.769937] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Created folder: Instances in parent group-v870220. 
[ 618.770275] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 618.770497] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eec6a484-ab00-402e-a369-c3009065c553] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 618.770714] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c49491f5-ce29-486f-87d0-94300d06bb5a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.791646] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 618.791646] env[65503]: value = "task-4449431" [ 618.791646] env[65503]: _type = "Task" [ 618.791646] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.800214] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449431, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.818505] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.538s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.819387] env[65503]: DEBUG nova.compute.manager [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 618.823919] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 10.482s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.851155] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449428, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071524} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.851377] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 618.852500] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f54e9dc-c6e7-41a9-8a68-2cb8a1ac0348 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.881205] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] db942a2d-671b-4036-a80b-d2375145cd29/db942a2d-671b-4036-a80b-d2375145cd29.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 618.881960] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a230f9a-b926-47b6-968c-9932f8af56f6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.903958] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for the task: (returnval){ [ 618.903958] env[65503]: value = "task-4449432" [ 618.903958] env[65503]: _type = "Task" [ 618.903958] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.916845] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449432, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.966436] env[65503]: WARNING neutronclient.v2_0.client [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 618.967140] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 618.967593] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 619.096653] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 619.096653] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 619.146607] env[65503]: WARNING openstack [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 619.147043] env[65503]: WARNING openstack [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 619.193700] env[65503]: DEBUG nova.network.neutron [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 619.220707] env[65503]: WARNING neutronclient.v2_0.client [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 619.221446] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 619.221825] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 619.289794] env[65503]: WARNING openstack [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 619.290713] env[65503]: WARNING openstack [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 619.309477] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449431, 'name': CreateVM_Task, 'duration_secs': 0.367283} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.309645] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eec6a484-ab00-402e-a369-c3009065c553] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 619.310286] env[65503]: WARNING neutronclient.v2_0.client [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 619.310890] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.310890] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.311451] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 619.311905] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02b36d10-a8e8-494d-a9b1-0c9ae54e1668 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.320644] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 619.320644] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525340fe-872f-26e7-a6d2-ec9c2f51b93d" [ 619.320644] env[65503]: _type = "Task" [ 619.320644] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.334326] env[65503]: INFO nova.compute.claims [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.339288] env[65503]: DEBUG nova.compute.utils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 619.347918] env[65503]: DEBUG nova.compute.manager [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 619.348309] env[65503]: DEBUG nova.network.neutron [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 619.348796] env[65503]: WARNING neutronclient.v2_0.client [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 619.349143] env[65503]: WARNING neutronclient.v2_0.client [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 619.349914] env[65503]: WARNING openstack [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 619.350418] env[65503]: WARNING openstack [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 619.358020] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525340fe-872f-26e7-a6d2-ec9c2f51b93d, 'name': SearchDatastore_Task, 'duration_secs': 0.010532} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.359536] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.359536] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 619.359824] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.360042] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.360346] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 619.361277] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e34c2df-2abb-46cc-9ef7-f4e67dd5f983 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.372574] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 619.373148] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 619.374511] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d8574ad-a00e-49be-8ceb-ae6a5b267845 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.382839] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 619.382839] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b6940c-1f71-4498-f8c3-0d19238d6b99" [ 619.382839] env[65503]: _type = "Task" [ 619.382839] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.398604] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b6940c-1f71-4498-f8c3-0d19238d6b99, 'name': SearchDatastore_Task, 'duration_secs': 0.010692} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.399062] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7072d203-bda9-48c8-920b-cd7cbfe6c4c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.406130] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 619.406130] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52854118-758c-6265-f0e1-6a863ff2988d" [ 619.406130] env[65503]: _type = "Task" [ 619.406130] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.420079] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449432, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.423622] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52854118-758c-6265-f0e1-6a863ff2988d, 'name': SearchDatastore_Task, 'duration_secs': 0.009693} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.426193] env[65503]: DEBUG nova.network.neutron [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Updated VIF entry in instance network info cache for port cde44ebc-2cc2-4df5-9886-d3766e7f4bb9. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 619.426623] env[65503]: DEBUG nova.network.neutron [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Updating instance_info_cache with network_info: [{"id": "cde44ebc-2cc2-4df5-9886-d3766e7f4bb9", "address": "fa:16:3e:1f:04:29", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcde44ebc-2c", "ovs_interfaceid": "cde44ebc-2cc2-4df5-9886-d3766e7f4bb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 619.427823] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.428144] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] eec6a484-ab00-402e-a369-c3009065c553/eec6a484-ab00-402e-a369-c3009065c553.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 619.428701] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b12c7e60-7700-4403-a335-c247a44467f3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.439302] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 619.439302] env[65503]: value = "task-4449433" [ 619.439302] env[65503]: _type = "Task" [ 619.439302] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.451804] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449433, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.541336] env[65503]: WARNING neutronclient.v2_0.client [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 619.541997] env[65503]: WARNING openstack [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 619.542362] env[65503]: WARNING openstack [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 619.582412] env[65503]: DEBUG nova.policy [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f7f16e9d1724717bd7e43299e795287', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7dd65b5b754e4028a7aeecd7daaa4557', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 619.685262] env[65503]: DEBUG nova.network.neutron [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Successfully updated port: 939876df-2b3d-4723-8926-1187feb4fe37 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 619.844474] env[65503]: INFO nova.compute.resource_tracker [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating resource usage from migration b59abcc2-79e0-4676-be84-c5f88601d520 [ 619.853458] env[65503]: DEBUG nova.network.neutron [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Updating instance_info_cache with network_info: [{"id": 
"3985c7bb-5579-4aeb-9dce-54e2716f7d60", "address": "fa:16:3e:95:7f:89", "network": {"id": "bf2cd38a-5089-47db-bb1c-35c17dddd3b2", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1414758544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5db0fe1dba4a49a57fac8ebceab968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3985c7bb-55", "ovs_interfaceid": "3985c7bb-5579-4aeb-9dce-54e2716f7d60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 619.860257] env[65503]: DEBUG nova.compute.manager [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 619.921250] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449432, 'name': ReconfigVM_Task, 'duration_secs': 0.783554} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.921250] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Reconfigured VM instance instance-0000000a to attach disk [datastore2] db942a2d-671b-4036-a80b-d2375145cd29/db942a2d-671b-4036-a80b-d2375145cd29.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 619.923196] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d33ec340-a85d-4860-bbcc-e1dd67ce9051 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.933277] env[65503]: DEBUG oslo_concurrency.lockutils [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Releasing lock "refresh_cache-1e92795e-cf30-4175-9e31-c29278f3e9e0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.933277] env[65503]: DEBUG nova.compute.manager [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Received event network-vif-deleted-c6e9e5bd-a5fa-4b57-9707-33c2c2106702 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 619.933277] env[65503]: DEBUG nova.compute.manager [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Received event network-vif-plugged-74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 619.933277] env[65503]: DEBUG oslo_concurrency.lockutils [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Acquiring lock "db942a2d-671b-4036-a80b-d2375145cd29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.933277] env[65503]: DEBUG oslo_concurrency.lockutils [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Lock "db942a2d-671b-4036-a80b-d2375145cd29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.933456] env[65503]: DEBUG oslo_concurrency.lockutils [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Lock "db942a2d-671b-4036-a80b-d2375145cd29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.933456] env[65503]: DEBUG nova.compute.manager [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] No waiting events found dispatching network-vif-plugged-74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 619.933456] env[65503]: 
WARNING nova.compute.manager [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Received unexpected event network-vif-plugged-74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059 for instance with vm_state building and task_state spawning. [ 619.933456] env[65503]: DEBUG nova.compute.manager [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Received event network-changed-74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 619.933456] env[65503]: DEBUG nova.compute.manager [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Refreshing instance network info cache due to event network-changed-74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 619.933611] env[65503]: DEBUG oslo_concurrency.lockutils [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Acquiring lock "refresh_cache-db942a2d-671b-4036-a80b-d2375145cd29" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.933658] env[65503]: DEBUG oslo_concurrency.lockutils [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Acquired lock "refresh_cache-db942a2d-671b-4036-a80b-d2375145cd29" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.933810] env[65503]: DEBUG nova.network.neutron [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Refreshing network info cache for port 74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 619.937587] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for the task: (returnval){ [ 619.937587] env[65503]: value = "task-4449434" [ 619.937587] env[65503]: _type = "Task" [ 619.937587] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.955614] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449434, 'name': Rename_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.962838] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449433, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514042} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.969219] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] eec6a484-ab00-402e-a369-c3009065c553/eec6a484-ab00-402e-a369-c3009065c553.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 619.969388] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 619.970333] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3645f299-beb3-4512-9110-27c8f6479f56 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.979813] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 619.979813] env[65503]: value = "task-4449435" [ 619.979813] env[65503]: _type = "Task" [ 619.979813] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.991718] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449435, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.189201] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquiring lock "refresh_cache-a1908e71-31f9-4308-b4d6-7908d3208c5a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.189493] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquired lock "refresh_cache-a1908e71-31f9-4308-b4d6-7908d3208c5a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.189611] env[65503]: DEBUG nova.network.neutron [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 620.246602] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d86c7d-976b-41f7-918b-bda00f6d420f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.258070] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a03e27a-6660-4166-a0d4-fa41f9f62e7a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.294472] env[65503]: DEBUG nova.network.neutron [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Successfully created port: 3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 620.298895] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6a5e67-a085-4e32-8d3f-bde02e87f755 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.305320] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "174c806e-c2e8-4064-8800-d4a35c19f5e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 620.305535] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "174c806e-c2e8-4064-8800-d4a35c19f5e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 620.314112] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef5d18e-6176-40d3-a3e4-8aaebb1c2fbb {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.332981] env[65503]: DEBUG nova.compute.provider_tree [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.355577] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Releasing lock "refresh_cache-5e2cf383-312b-404f-acff-2ecb75678600" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.355946] env[65503]: DEBUG nova.compute.manager [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Instance network_info: |[{"id": "3985c7bb-5579-4aeb-9dce-54e2716f7d60", "address": "fa:16:3e:95:7f:89", "network": {"id": "bf2cd38a-5089-47db-bb1c-35c17dddd3b2", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1414758544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5db0fe1dba4a49a57fac8ebceab968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3985c7bb-55", "ovs_interfaceid": "3985c7bb-5579-4aeb-9dce-54e2716f7d60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 620.356647] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:7f:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '572b7281-aad3-45fa-9cb2-fc1c70569948', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3985c7bb-5579-4aeb-9dce-54e2716f7d60', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 620.365945] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Creating folder: Project (2a5db0fe1dba4a49a57fac8ebceab968). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 620.370437] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf1d6739-1a03-453a-89cd-96942c2e43c6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.385808] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Created folder: Project (2a5db0fe1dba4a49a57fac8ebceab968) in parent group-v870190. [ 620.386400] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Creating folder: Instances. Parent ref: group-v870223. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 620.386400] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43804039-9e5c-4e76-940c-72606e45199c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.399469] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Created folder: Instances in parent group-v870223. [ 620.399700] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 620.399901] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 620.400233] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-314cca41-d742-4701-975c-0a02af79dcf5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.424165] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 620.424165] env[65503]: value = "task-4449438" [ 620.424165] env[65503]: _type = "Task" [ 620.424165] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.437975] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449438, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.438706] env[65503]: WARNING neutronclient.v2_0.client [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 620.439378] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 620.439760] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 620.459567] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449434, 'name': Rename_Task, 'duration_secs': 0.19301} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.459908] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 620.460239] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-359a749d-59e1-4366-8847-1e70dfc5fa3e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.468878] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for the task: (returnval){ [ 620.468878] env[65503]: value = "task-4449439" [ 620.468878] env[65503]: _type = "Task" [ 620.468878] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.473133] env[65503]: INFO nova.compute.manager [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Rebuilding instance [ 620.484435] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449439, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.495244] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449435, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077146} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.495600] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 620.496688] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198e676b-89bf-44d0-8b6e-01849d10fc6f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.525355] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] eec6a484-ab00-402e-a369-c3009065c553/eec6a484-ab00-402e-a369-c3009065c553.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 620.530327] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ddaf121-9933-45ea-8c0d-5346fea8fb1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.556694] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 620.556694] env[65503]: value = "task-4449440" [ 620.556694] env[65503]: _type = "Task" [ 620.556694] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.576219] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449440, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.579252] env[65503]: DEBUG nova.compute.manager [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 620.580295] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c58565-4c99-4d76-84d9-836bf1110801 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.694442] env[65503]: WARNING openstack [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 620.694923] env[65503]: WARNING openstack [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 620.808723] env[65503]: DEBUG nova.compute.manager [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 620.837805] env[65503]: DEBUG nova.scheduler.client.report [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 620.875949] env[65503]: DEBUG nova.compute.manager [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 620.908088] env[65503]: DEBUG nova.virt.hardware [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 620.908088] env[65503]: DEBUG nova.virt.hardware [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 620.908334] env[65503]: DEBUG nova.virt.hardware [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 620.908470] env[65503]: DEBUG nova.virt.hardware [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 620.908634] env[65503]: DEBUG nova.virt.hardware [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 620.908834] env[65503]: DEBUG nova.virt.hardware [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 620.908995] env[65503]: DEBUG nova.virt.hardware [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 620.909281] env[65503]: DEBUG nova.virt.hardware [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 620.909537] 
env[65503]: DEBUG nova.virt.hardware [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 620.909760] env[65503]: DEBUG nova.virt.hardware [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 620.909956] env[65503]: DEBUG nova.virt.hardware [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 620.910950] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4696ede-6d1b-4d72-ae32-fc4ea72070eb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.920515] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59b52ff-b867-42ed-9360-afbdd5c39b28 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.937962] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449438, 'name': CreateVM_Task, 'duration_secs': 0.498778} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.946095] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 620.946960] env[65503]: WARNING neutronclient.v2_0.client [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
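[editor's note] The entries around this point all follow one pattern: an oslo.vmware call returns a task handle, the caller logs "Waiting for the task", and a poll loop reports "progress is N%" (api.py:434) until "completed successfully" (api.py:444). The sketch below is a simplified, hypothetical rendering of that poll-until-done loop for illustration only; `FakeTask` and `wait_for_task` here are stand-in names, not the oslo.vmware implementation.

```python
import time

# Hypothetical stand-in for a vSphere task handle; NOT the oslo.vmware API.
class FakeTask:
    def __init__(self, step):
        self._progress = 0
        self._step = step

    def poll(self):
        # Each poll advances the task, mimicking the "progress is N%" entries.
        self._progress = min(100, self._progress + self._step)
        state = "success" if self._progress == 100 else "running"
        return state, self._progress


def wait_for_task(task, poll_interval=0.5):
    """Block until the task completes, logging progress like _poll_task does."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        if state == "running":
            print(f"Task progress is {progress}%.")            # cf. api.py:434
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - start
        print(f"Task completed successfully ({duration:.3f}s).")  # cf. api.py:444
        return


if __name__ == "__main__":
    wait_for_task(FakeTask(step=25), poll_interval=0.1)
```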
[ 620.947341] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.947510] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.948145] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 620.948822] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbeefb30-1106-43ce-b612-e80703229591 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.955877] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 620.955877] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]523415d0-dcf9-8efc-e53a-ccca6553f09c" [ 620.955877] env[65503]: _type = "Task" [ 620.955877] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.971751] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523415d0-dcf9-8efc-e53a-ccca6553f09c, 'name': SearchDatastore_Task, 'duration_secs': 0.011739} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.977486] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.977783] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 620.978036] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.978232] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.979150] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 620.979673] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4ac43ea-f0d1-436c-a93d-4339257384e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.992697] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449439, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.993928] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 620.994199] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 620.994853] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f44b89e-762d-4fd7-99e4-29bc783ecb0b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.002308] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 621.002308] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e81d76-1cd8-d3e4-a6f8-ce46d6c04f80" [ 621.002308] env[65503]: _type = "Task" [ 621.002308] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.013988] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e81d76-1cd8-d3e4-a6f8-ce46d6c04f80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.068840] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449440, 'name': ReconfigVM_Task, 'duration_secs': 0.363469} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.068840] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Reconfigured VM instance instance-0000000b to attach disk [datastore2] eec6a484-ab00-402e-a369-c3009065c553/eec6a484-ab00-402e-a369-c3009065c553.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 621.069200] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b3f379f-56df-498a-a14b-8e5601e02c4e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.078267] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 621.078267] env[65503]: value = "task-4449441" [ 621.078267] env[65503]: _type = "Task" [ 621.078267] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.089781] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449441, 'name': Rename_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.339046] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.347636] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.521s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.347636] env[65503]: INFO nova.compute.manager [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Migrating [ 621.347636] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.347636] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "compute-rpcapi-router" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.347636] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.043s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.350706] env[65503]: INFO nova.compute.claims [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 621.355281] env[65503]: INFO nova.compute.rpcapi [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Automatically selected compute RPC version 6.4 from minimum service version 70 [ 621.356384] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "compute-rpcapi-router" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.482437] env[65503]: DEBUG oslo_vmware.api [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449439, 'name': PowerOnVM_Task, 'duration_secs': 0.715708} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.483078] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 621.483431] env[65503]: INFO nova.compute.manager [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Took 10.24 seconds to spawn the instance on the hypervisor. [ 621.483758] env[65503]: DEBUG nova.compute.manager [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 621.484743] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a647d152-29f5-4bfc-b36a-f7067fc6aa2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.513474] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e81d76-1cd8-d3e4-a6f8-ce46d6c04f80, 'name': SearchDatastore_Task, 'duration_secs': 0.011879} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.514546] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b3aaa34-ba7c-4951-91cd-441c4f417452 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.520982] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 621.520982] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cb9f4b-a762-ca5e-a5d4-a0347e9a005d" [ 621.520982] env[65503]: _type = "Task" [ 621.520982] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.530496] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cb9f4b-a762-ca5e-a5d4-a0347e9a005d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.591016] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449441, 'name': Rename_Task, 'duration_secs': 0.169252} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.591016] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 621.591016] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83cb7544-9598-4571-a7ca-ae3b76ce9658 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.598657] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 621.598657] env[65503]: value = "task-4449442" [ 621.598657] env[65503]: _type = "Task" [ 621.598657] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.602372] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 621.602635] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36d32a62-5540-402e-b2b1-79e49c78c1f7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.610352] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449442, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.611727] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 621.611727] env[65503]: value = "task-4449443" [ 621.611727] env[65503]: _type = "Task" [ 621.611727] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.621687] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449443, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.875699] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.875892] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.876111] env[65503]: DEBUG nova.network.neutron [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 621.949966] env[65503]: DEBUG nova.network.neutron [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Successfully updated port: 3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 622.005746] env[65503]: INFO nova.compute.manager [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Took 23.22 seconds to build instance. [ 622.034919] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cb9f4b-a762-ca5e-a5d4-a0347e9a005d, 'name': SearchDatastore_Task, 'duration_secs': 0.00997} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.035212] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.035501] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 5e2cf383-312b-404f-acff-2ecb75678600/5e2cf383-312b-404f-acff-2ecb75678600.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 622.035782] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0502700f-f955-423e-9bed-cea68bb56f7c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.044643] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 622.044643] env[65503]: value = "task-4449444" [ 622.044643] env[65503]: _type = "Task" [ 622.044643] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.053961] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449444, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.092034] env[65503]: DEBUG nova.network.neutron [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 622.111072] env[65503]: DEBUG oslo_vmware.api [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449442, 'name': PowerOnVM_Task, 'duration_secs': 0.492584} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.111072] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 622.111072] env[65503]: INFO nova.compute.manager [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Took 8.34 seconds to spawn the instance on the hypervisor. [ 622.111977] env[65503]: DEBUG nova.compute.manager [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 622.113055] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6414bc-8428-42b4-b4e3-b8f53ea55a6a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.128814] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449443, 'name': PowerOffVM_Task, 'duration_secs': 0.254792} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.135493] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 622.135820] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 622.141357] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4dbe7e-cfce-4799-a6cc-2b0968b57375 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.150595] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 622.150893] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e00252a3-7eef-4568-815d-d58d1e6da265 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.184586] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 
tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 622.184857] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 622.185053] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Deleting the datastore file [datastore2] 50f11559-b8c7-41a2-aa43-255a28ffa58c {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 622.185415] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c37db903-3157-435a-955d-e44ba8695f17 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.195961] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 622.195961] env[65503]: value = "task-4449446" [ 622.195961] env[65503]: _type = "Task" [ 622.195961] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.207951] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449446, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.379853] env[65503]: WARNING neutronclient.v2_0.client [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
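[editor's note] The lockutils entries in this run report two durations per named lock: how long the caller *waited* to acquire it and how long it was *held* (e.g. "waited 12.043s", "held 2.521s" for "compute_resources"). The stdlib sketch below shows that bookkeeping in isolation; it uses `threading.Lock` rather than oslo.concurrency, and all names are illustrative additions.

```python
import threading
import time
from contextlib import contextmanager

_locks = {}                      # name -> threading.Lock (named locks, like the log)
_registry_guard = threading.Lock()


@contextmanager
def timed_lock(name):
    """Acquire a named lock and report waited/held durations, log-style."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        lock.release()
        print(f'Lock "{name}" released :: held {held:.3f}s')


if __name__ == "__main__":
    with timed_lock("compute_resources"):
        time.sleep(0.2)          # simulate work done while holding the lock
```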
[ 622.381112] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 622.381774] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 622.455599] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "refresh_cache-2ab1cd4b-f2c0-4264-8463-8127a733a1c5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.455599] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquired lock "refresh_cache-2ab1cd4b-f2c0-4264-8463-8127a733a1c5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.455599] env[65503]: DEBUG nova.network.neutron [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 622.508763] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3cf7d454-3b3b-4072-9448-cc1a4ce43184 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Lock "db942a2d-671b-4036-a80b-d2375145cd29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.736s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.510535] env[65503]: WARNING openstack [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 622.510918] env[65503]: WARNING openstack [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 622.528217] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 
req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 622.529496] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 622.560835] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449444, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.661299] env[65503]: INFO nova.compute.manager [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Took 22.79 seconds to build instance. [ 622.705828] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143631} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.706112] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 622.706453] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 622.706636] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 622.738609] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8389322-4164-4f01-ab5c-128af3d29957 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.749755] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19862a0-726a-485b-bffd-e44173e7d04d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.784566] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8c2150-0d30-49aa-bd77-fa4f79a2d997 
{{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.793793] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f651c26-4020-415f-881d-c0322ef0a687 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.814358] env[65503]: DEBUG nova.compute.provider_tree [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.961057] env[65503]: WARNING openstack [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 622.961652] env[65503]: WARNING openstack [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 623.057990] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449444, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632704} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.058320] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 5e2cf383-312b-404f-acff-2ecb75678600/5e2cf383-312b-404f-acff-2ecb75678600.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 623.058529] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 623.058789] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a52efe97-6e90-41ce-a717-413c44517b86 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.066069] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 623.066069] env[65503]: value = "task-4449447" [ 623.066069] env[65503]: _type = "Task" [ 623.066069] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.075818] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449447, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.164266] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3856f98-98dd-42fa-9b36-aa9b3b10a06a tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "eec6a484-ab00-402e-a369-c3009065c553" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.322s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.318291] env[65503]: DEBUG nova.scheduler.client.report [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 623.577746] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449447, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.186591} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.578370] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 623.581553] env[65503]: WARNING neutronclient.v2_0.client [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
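[editor's note] Taken together, the task names in this stretch trace one spawn of a sparse-image instance (create the VM shell, locate the cached image, copy it into the instance folder, extend it to the flavor's root disk size, attach it via reconfigure, rename, power on) and, for instance 50f11559, the mirror-image teardown (power off, unregister, delete datastore files). The outline below is only a readable summary of that ordering as it appears in these entries, not Nova driver code; the descriptive labels are my additions.

```python
# Ordering of vSphere tasks observed in this log; labels are editorial.
SPAWN_SEQUENCE = [
    ("create the VM shell",        "CreateVM_Task"),
    ("locate cached image",        "SearchDatastore_Task"),
    ("copy image to instance dir", "CopyVirtualDisk_Task"),
    ("grow the root disk",         "ExtendVirtualDisk_Task"),
    ("attach disk to the VM",      "ReconfigVM_Task"),
    ("rename to final name",       "Rename_Task"),
    ("boot the guest",             "PowerOnVM_Task"),
]

DESTROY_SEQUENCE = [
    ("stop the guest",             "PowerOffVM_Task"),
    ("drop vCenter registration",  "UnregisterVM"),
    ("remove files on datastore",  "DeleteDatastoreFile_Task"),
]

if __name__ == "__main__":
    for label, task in SPAWN_SEQUENCE + DESTROY_SEQUENCE:
        print(f"{task:<25} -> {label}")
```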
[ 623.582641] env[65503]: WARNING openstack [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 623.583047] env[65503]: WARNING openstack [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 623.594144] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e674fe50-5731-451d-8db2-419db6a58bb6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.622229] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 5e2cf383-312b-404f-acff-2ecb75678600/5e2cf383-312b-404f-acff-2ecb75678600.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 623.622739] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2676c4bf-9f0d-4991-9886-653547e844ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.643102] env[65503]: WARNING neutronclient.v2_0.client [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
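[editor's note] The recurring "Disabling service 'block-storage'/'key-manager'" warnings come from oslo.config: the client-side code looks up valid_interfaces in the [cinder] and [barbican] groups, no such option is registered there, and oslo_config.cfg.NoSuchOptError is raised, so the SDK disables that service for the session. The snippet below reproduces only the error class in isolation, assuming the public oslo.config API (ConfigOpts, register_opts, StrOpt); the placeholder option it registers is arbitrary, and this is not the configuration Nova or the SDK actually sets up.

```python
from oslo_config import cfg

conf = cfg.ConfigOpts()

# Register *some* placeholder option so a [cinder] group exists,
# deliberately omitting 'valid_interfaces'.
conf.register_opts([cfg.StrOpt("catalog_info")], group="cinder")

try:
    # Accessing an option never registered in this group raises
    # NoSuchOptError, the exception quoted verbatim in the warnings above.
    _ = conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print(f"caught: {exc}")
```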
[ 623.643102] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 623.643102] env[65503]: WARNING openstack [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 623.655322] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 623.655850] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 623.675029] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 623.675029] env[65503]: value = "task-4449448" [ 623.675029] env[65503]: _type = "Task" [ 623.675029] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.682431] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449448, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.706932] env[65503]: DEBUG nova.network.neutron [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 623.759600] env[65503]: DEBUG nova.virt.hardware [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 623.759864] env[65503]: DEBUG nova.virt.hardware [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 623.760017] env[65503]: DEBUG nova.virt.hardware [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 623.760279] env[65503]: DEBUG nova.virt.hardware [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 623.760435] env[65503]: DEBUG nova.virt.hardware [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 623.760872] env[65503]: DEBUG nova.virt.hardware [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 623.760872] env[65503]: DEBUG nova.virt.hardware [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 623.761408] env[65503]: DEBUG nova.virt.hardware [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 623.761408] env[65503]: DEBUG nova.virt.hardware [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 
tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 623.761408] env[65503]: DEBUG nova.virt.hardware [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 623.761537] env[65503]: DEBUG nova.virt.hardware [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 623.762806] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a84a10a-cb2c-4859-9e2d-cb526902b473 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.776198] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1848e6b6-0358-40ed-b6af-20ba4c6ca801 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.794132] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 623.804155] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 623.804489] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 623.806042] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f89966c4-2185-4627-bdbe-b10756545d44 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.824516] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.825443] env[65503]: DEBUG nova.compute.manager [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 623.830047] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.039s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.830274] env[65503]: DEBUG nova.objects.instance [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Lazy-loading 'resources' on Instance uuid c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 623.832334] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 623.832334] env[65503]: value = "task-4449449" [ 623.832334] env[65503]: _type = "Task" [ 623.832334] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.849217] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449449, 'name': CreateVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.185096] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449448, 'name': ReconfigVM_Task, 'duration_secs': 0.38901} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.185648] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 5e2cf383-312b-404f-acff-2ecb75678600/5e2cf383-312b-404f-acff-2ecb75678600.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 624.186307] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad594531-7637-47f9-952e-733dfd54c716 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.195682] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 624.195682] env[65503]: value = "task-4449450" [ 624.195682] env[65503]: _type = "Task" [ 624.195682] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.207163] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449450, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.340915] env[65503]: DEBUG nova.compute.utils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 624.344023] env[65503]: DEBUG nova.compute.manager [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 624.344363] env[65503]: DEBUG nova.network.neutron [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 624.344815] env[65503]: WARNING neutronclient.v2_0.client [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 624.345880] env[65503]: WARNING neutronclient.v2_0.client [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 624.346911] env[65503]: WARNING openstack [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 624.347636] env[65503]: WARNING openstack [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 624.373300] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449449, 'name': CreateVM_Task, 'duration_secs': 0.297507} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.373669] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 624.374152] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.374320] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.375825] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 624.375825] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e379d87c-41ba-49bf-975b-6e8b620dabc5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.381928] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 624.381928] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527a43a8-051b-a7f2-02c0-ac4721e8e253" [ 624.381928] env[65503]: _type = "Task" [ 624.381928] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.398286] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527a43a8-051b-a7f2-02c0-ac4721e8e253, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.441533] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquiring lock "d8d917c1-224c-4773-a911-d09f3f719e1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.441901] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Lock "d8d917c1-224c-4773-a911-d09f3f719e1b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.713346] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449450, 'name': Rename_Task, 'duration_secs': 0.264826} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.713652] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 624.717672] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ac9e7aa-4093-48ff-bf17-fd7102f16c8d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.728164] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 624.728164] env[65503]: value = "task-4449451" [ 624.728164] env[65503]: _type = "Task" [ 624.728164] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.741588] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449451, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.840332] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8189b93-e347-4bde-8782-d2e73991f57c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.850800] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e078c63-c14f-4035-a72f-e66c299c89b8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.893047] env[65503]: DEBUG nova.compute.manager [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 624.900434] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5483564-6a94-490d-96b1-4d57918cff1b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.913044] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527a43a8-051b-a7f2-02c0-ac4721e8e253, 'name': SearchDatastore_Task, 'duration_secs': 0.025155} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.913452] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.913683] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 624.913956] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.914147] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.915160] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 
tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 624.915778] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2caa998-6654-45bd-a095-cda640dc5af1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.922692] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ecee832-9e89-48fc-b0af-a7a3ed4d1da3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.939763] env[65503]: DEBUG nova.compute.provider_tree [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.942457] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 624.944087] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 624.944087] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99234544-5e2c-4e61-879e-40c94bff6007 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.947823] env[65503]: DEBUG nova.compute.manager [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 624.955411] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 624.955411] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa6981-e62d-4152-bc32-1267ab1fdd3f" [ 624.955411] env[65503]: _type = "Task" [ 624.955411] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.968953] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa6981-e62d-4152-bc32-1267ab1fdd3f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.126054] env[65503]: WARNING openstack [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 625.126456] env[65503]: WARNING openstack [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 625.167062] env[65503]: DEBUG nova.network.neutron [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Updating instance_info_cache with network_info: [{"id": "939876df-2b3d-4723-8926-1187feb4fe37", "address": "fa:16:3e:ac:ad:63", "network": {"id": "8a095ab6-7707-4d88-81ca-9d7588aec7de", "bridge": "br-int", "label": "tempest-ServersTestJSON-408175709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6f55d9e8eeb34665aaa465057871d687", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26a1e556-1ede-445a-bf87-a35daa5d9070", "external-id": "nsx-vlan-transportzone-396", "segmentation_id": 396, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap939876df-2b", "ovs_interfaceid": "939876df-2b3d-4723-8926-1187feb4fe37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 625.196868] env[65503]: DEBUG nova.network.neutron [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Updated VIF entry in instance network info cache for port 74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 625.198804] env[65503]: DEBUG nova.network.neutron [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Updating instance_info_cache with network_info: [{"id": "74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059", "address": "fa:16:3e:26:d2:17", "network": {"id": "d504c3f2-514b-4ccc-a7e0-1813e7cfece6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1527786722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72a3774600024de1b1347117fd020278", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74b1a7c2-cf", "ovs_interfaceid": "74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 625.213022] env[65503]: WARNING neutronclient.v2_0.client [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 625.213711] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 625.214141] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 625.246579] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449451, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.308937] env[65503]: DEBUG nova.policy [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f7f16e9d1724717bd7e43299e795287', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7dd65b5b754e4028a7aeecd7daaa4557', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 625.349278] env[65503]: DEBUG nova.network.neutron [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance_info_cache with network_info: [{"id": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "address": "fa:16:3e:34:a3:37", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03697e78-59", "ovs_interfaceid": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 625.362118] env[65503]: WARNING neutronclient.v2_0.client [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
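Annotation: the recurring "Disabling service 'block-storage' / 'key-manager'" warnings above all reduce to the same oslo.config behaviour: reading an option that was never registered in a group raises oslo_config.cfg.NoSuchOptError, and openstacksdk reacts by disabling config for that service type. The sketch below reproduces only that mechanism with a standalone ConfigOpts and a hypothetical option set; it is not Nova's real option registration. In a working setup the per-service options such as valid_interfaces are normally registered by the service-client plumbing (keystoneauth adapter option loading), which is presumably what is missing or mismatched for the [cinder] and [barbican] groups here.

```python
# Sketch only: a standalone ConfigOpts with a single hypothetical option
# registered under [cinder]; NOT Nova's real option registration.
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_opts([cfg.StrOpt('region_name')], group='cinder')
conf([])  # parse with no config files / CLI args

print(conf.cinder.region_name)    # registered but unset -> None

try:
    conf.cinder.valid_interfaces  # never registered in this sketch
except cfg.NoSuchOptError as exc:
    # roughly: "no such option valid_interfaces in group [cinder]"
    print(exc)
```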
[ 625.362118] env[65503]: WARNING openstack [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 625.362487] env[65503]: WARNING openstack [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 625.445114] env[65503]: DEBUG nova.scheduler.client.report [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 625.478455] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa6981-e62d-4152-bc32-1267ab1fdd3f, 'name': SearchDatastore_Task, 'duration_secs': 0.01425} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.479728] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-038982aa-922d-4504-bb0e-6e38313f3a6a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.489013] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 625.489013] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52015db7-2502-72bd-edab-e82ea78713c6" [ 625.489013] env[65503]: _type = "Task" [ 625.489013] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.491057] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.507168] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52015db7-2502-72bd-edab-e82ea78713c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.511907] env[65503]: DEBUG nova.network.neutron [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Updating instance_info_cache with network_info: [{"id": "3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3", "address": "fa:16:3e:c4:be:5e", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cc93e6f-5b", "ovs_interfaceid": "3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 625.670612] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Releasing lock "refresh_cache-a1908e71-31f9-4308-b4d6-7908d3208c5a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.670946] env[65503]: DEBUG nova.compute.manager [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Instance network_info: |[{"id": "939876df-2b3d-4723-8926-1187feb4fe37", "address": "fa:16:3e:ac:ad:63", "network": {"id": "8a095ab6-7707-4d88-81ca-9d7588aec7de", "bridge": "br-int", "label": "tempest-ServersTestJSON-408175709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6f55d9e8eeb34665aaa465057871d687", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26a1e556-1ede-445a-bf87-a35daa5d9070", "external-id": "nsx-vlan-transportzone-396", "segmentation_id": 396, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap939876df-2b", "ovs_interfaceid": "939876df-2b3d-4723-8926-1187feb4fe37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 625.671637] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:ad:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '26a1e556-1ede-445a-bf87-a35daa5d9070', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '939876df-2b3d-4723-8926-1187feb4fe37', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 625.683685] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Creating folder: Project (6f55d9e8eeb34665aaa465057871d687). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 625.684388] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a25bc0ee-ab6e-4b3d-b420-3d5c687acd0d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.696266] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Created folder: Project (6f55d9e8eeb34665aaa465057871d687) in parent group-v870190. [ 625.696364] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Creating folder: Instances. Parent ref: group-v870227. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 625.696784] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59509e25-d68a-44b4-a766-09c1bc1b6381 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.701600] env[65503]: DEBUG oslo_concurrency.lockutils [req-39af398b-aa66-4d15-99e5-1104c9c4fff8 req-3b4333f2-9340-4fc3-8452-998b1477caea service nova] Releasing lock "refresh_cache-db942a2d-671b-4036-a80b-d2375145cd29" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.714463] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Created folder: Instances in parent group-v870227. 
[ 625.714857] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 625.714999] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 625.715476] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b694a0a7-4c39-4439-a520-0d35e65be905 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.741755] env[65503]: DEBUG oslo_vmware.api [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449451, 'name': PowerOnVM_Task, 'duration_secs': 0.887346} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.742947] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 625.743173] env[65503]: INFO nova.compute.manager [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Took 9.51 seconds to spawn the instance on the hypervisor. [ 625.743346] env[65503]: DEBUG nova.compute.manager [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 625.743640] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 625.743640] env[65503]: value = "task-4449454" [ 625.743640] env[65503]: _type = "Task" [ 625.743640] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.744601] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959cacf8-7b76-43e0-a991-a26a4869613a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.757268] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449454, 'name': CreateVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.815191] env[65503]: DEBUG nova.network.neutron [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Successfully created port: 9f691f3d-5247-46d4-b46b-6840c2cc557d {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 625.853541] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.908457] env[65503]: DEBUG nova.compute.manager [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 625.948122] env[65503]: DEBUG nova.virt.hardware [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 625.948397] env[65503]: DEBUG nova.virt.hardware [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 625.948543] env[65503]: DEBUG nova.virt.hardware [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 625.948721] env[65503]: DEBUG nova.virt.hardware [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 625.948853] env[65503]: DEBUG nova.virt.hardware [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:400}} [ 625.948990] env[65503]: DEBUG nova.virt.hardware [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 625.949214] env[65503]: DEBUG nova.virt.hardware [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 625.949360] env[65503]: DEBUG nova.virt.hardware [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 625.949525] env[65503]: DEBUG nova.virt.hardware [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 625.950451] env[65503]: DEBUG nova.virt.hardware [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 625.950451] env[65503]: DEBUG nova.virt.hardware [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 625.950975] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be2a55f-dd74-41af-a3aa-9022656e1b8a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.960448] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.131s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.963195] env[65503]: DEBUG oslo_concurrency.lockutils [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.058s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.963428] env[65503]: DEBUG nova.objects.instance [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Lazy-loading 
'resources' on Instance uuid 02b3b44e-96bb-47a0-8aa0-7026d987cad8 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 625.965722] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801fe1b9-954f-4931-84e9-9baf6f8255ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.011990] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52015db7-2502-72bd-edab-e82ea78713c6, 'name': SearchDatastore_Task, 'duration_secs': 0.029094} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.012750] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.013419] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 626.013814] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e12c49db-4ca0-45b1-880c-6e17cfde2df2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.018163] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Releasing lock "refresh_cache-2ab1cd4b-f2c0-4264-8463-8127a733a1c5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.018634] env[65503]: DEBUG nova.compute.manager [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Instance network_info: |[{"id": "3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3", "address": "fa:16:3e:c4:be:5e", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cc93e6f-5b", "ovs_interfaceid": "3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 626.019168] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:be:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 626.030156] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Creating folder: Project (7dd65b5b754e4028a7aeecd7daaa4557). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.032255] env[65503]: INFO nova.scheduler.client.report [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Deleted allocations for instance c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3 [ 626.034143] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-801c2302-a179-4de7-aaf2-f43b9664472c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.041809] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 626.041809] env[65503]: value = "task-4449455" [ 626.041809] env[65503]: _type = "Task" [ 626.041809] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.058472] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449455, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.058835] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Created folder: Project (7dd65b5b754e4028a7aeecd7daaa4557) in parent group-v870190. [ 626.059029] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Creating folder: Instances. Parent ref: group-v870230. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.059340] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb47bbee-fa74-41bd-a3bb-7fedf9092a9a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.072600] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Created folder: Instances in parent group-v870230. [ 626.072871] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 626.073221] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 626.073290] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bce81e8f-5c92-4b28-a5a3-8f14ae8ac0e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.097042] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 626.097042] env[65503]: value = "task-4449458" [ 626.097042] env[65503]: _type = "Task" [ 626.097042] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.108987] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449458, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.222050] env[65503]: DEBUG nova.compute.manager [req-7cf1b9e9-dde1-4352-8148-a7ecf78772c2 req-29b1d065-0f41-432d-836b-f4fa3a210bba service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Received event network-vif-plugged-939876df-2b3d-4723-8926-1187feb4fe37 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 626.224184] env[65503]: DEBUG oslo_concurrency.lockutils [req-7cf1b9e9-dde1-4352-8148-a7ecf78772c2 req-29b1d065-0f41-432d-836b-f4fa3a210bba service nova] Acquiring lock "a1908e71-31f9-4308-b4d6-7908d3208c5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.224399] env[65503]: DEBUG oslo_concurrency.lockutils [req-7cf1b9e9-dde1-4352-8148-a7ecf78772c2 req-29b1d065-0f41-432d-836b-f4fa3a210bba service nova] Lock "a1908e71-31f9-4308-b4d6-7908d3208c5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.224543] env[65503]: DEBUG oslo_concurrency.lockutils [req-7cf1b9e9-dde1-4352-8148-a7ecf78772c2 req-29b1d065-0f41-432d-836b-f4fa3a210bba service nova] Lock "a1908e71-31f9-4308-b4d6-7908d3208c5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.224701] env[65503]: DEBUG nova.compute.manager [req-7cf1b9e9-dde1-4352-8148-a7ecf78772c2 req-29b1d065-0f41-432d-836b-f4fa3a210bba service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] No waiting events found dispatching network-vif-plugged-939876df-2b3d-4723-8926-1187feb4fe37 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 626.224886] env[65503]: WARNING nova.compute.manager [req-7cf1b9e9-dde1-4352-8148-a7ecf78772c2 req-29b1d065-0f41-432d-836b-f4fa3a210bba service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Received unexpected event network-vif-plugged-939876df-2b3d-4723-8926-1187feb4fe37 for instance with vm_state building and task_state spawning. [ 626.262179] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449454, 'name': CreateVM_Task, 'duration_secs': 0.43232} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.262179] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 626.262179] env[65503]: WARNING neutronclient.v2_0.client [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
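The "Waiting for the task: (returnval){...}", "progress is N%" and "completed successfully" records above come from oslo.vmware's task polling: the driver invokes an asynchronous vSphere *_Task method through the session and then blocks on the returned Task object. A minimal sketch of that pattern follows, loosely mirroring the copy_virtual_disk helper referenced in these records (nova/virt/vmwareapi/vm_util.py); the connection values are placeholders, not values from this environment.

from oslo_vmware import api

# Placeholder connection values; a real deployment reads these from nova.conf.
session = api.VMwareAPISession(
    'vcenter.example.org', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def copy_virtual_disk(session, dc_ref, source, dest):
    """Start CopyVirtualDisk_Task and block until it finishes.

    invoke_api returns a Task managed-object reference immediately;
    wait_for_task then polls it (the "progress is N%" lines) until the
    task reports success, raising on a vSphere fault.
    """
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=source,        # e.g. a devstack-image-cache_base vmdk path
        sourceDatacenter=dc_ref,  # datacenter managed-object reference
        destName=dest)            # e.g. '[datastore1] <uuid>/<uuid>.vmdk'
    return session.wait_for_task(task)

The same invoke-then-wait shape covers every task type seen here (CreateVM_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOffVM_Task and so on).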
[ 626.262703] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.263155] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.264026] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 626.267601] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7781fc6d-c47c-4a46-a930-debe76e3dc89 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.273753] env[65503]: INFO nova.compute.manager [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Took 25.63 seconds to build instance. [ 626.277625] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for the task: (returnval){ [ 626.277625] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]523bf52b-10a5-04b3-6910-54dcee19f057" [ 626.277625] env[65503]: _type = "Task" [ 626.277625] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.288754] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523bf52b-10a5-04b3-6910-54dcee19f057, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.549560] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cfed5855-d93b-4085-ac2c-362fb4fa91cd tempest-DeleteServersAdminTestJSON-2039433906 tempest-DeleteServersAdminTestJSON-2039433906-project-admin] Lock "c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.576s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.564944] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449455, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.610475] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449458, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.777433] env[65503]: DEBUG oslo_concurrency.lockutils [None req-195b63ed-c17b-4928-9064-b50e8f5f889e tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lock "5e2cf383-312b-404f-acff-2ecb75678600" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.137s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.795182] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523bf52b-10a5-04b3-6910-54dcee19f057, 'name': SearchDatastore_Task, 'duration_secs': 0.019959} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.796875] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.797208] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 626.797470] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.797707] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.797970] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 626.798321] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-070798cc-89c7-4f5f-9cda-6e1011eaadbd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.811085] env[65503]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 626.811311] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 626.812123] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3737c79b-f382-40dc-b6e6-471ca030343f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.818386] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for the task: (returnval){ [ 626.818386] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d91266-ccff-aa83-5700-146dfa04abb4" [ 626.818386] env[65503]: _type = "Task" [ 626.818386] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.834944] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d91266-ccff-aa83-5700-146dfa04abb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.868183] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0659fde7-7be6-4d5d-9551-b48bd7900120 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.880760] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3e4e84-ff63-4fbd-a160-adee89312cc6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.925542] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d88acbc-00ae-49a3-b484-1b1010f47698 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.931464] env[65503]: DEBUG nova.compute.manager [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: eec6a484-ab00-402e-a369-c3009065c553] Received event network-vif-plugged-984474f0-e03b-413b-8c1f-8e553672a7df {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 626.931588] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Acquiring lock "eec6a484-ab00-402e-a369-c3009065c553-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.931854] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] 
Lock "eec6a484-ab00-402e-a369-c3009065c553-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.931987] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Lock "eec6a484-ab00-402e-a369-c3009065c553-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.932246] env[65503]: DEBUG nova.compute.manager [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: eec6a484-ab00-402e-a369-c3009065c553] No waiting events found dispatching network-vif-plugged-984474f0-e03b-413b-8c1f-8e553672a7df {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 626.933254] env[65503]: WARNING nova.compute.manager [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: eec6a484-ab00-402e-a369-c3009065c553] Received unexpected event network-vif-plugged-984474f0-e03b-413b-8c1f-8e553672a7df for instance with vm_state active and task_state None. [ 626.933254] env[65503]: DEBUG nova.compute.manager [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: eec6a484-ab00-402e-a369-c3009065c553] Received event network-changed-984474f0-e03b-413b-8c1f-8e553672a7df {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 626.933254] env[65503]: DEBUG nova.compute.manager [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: eec6a484-ab00-402e-a369-c3009065c553] Refreshing instance network info cache due to event network-changed-984474f0-e03b-413b-8c1f-8e553672a7df. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 626.933254] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Acquiring lock "refresh_cache-eec6a484-ab00-402e-a369-c3009065c553" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.933254] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Acquired lock "refresh_cache-eec6a484-ab00-402e-a369-c3009065c553" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.934557] env[65503]: DEBUG nova.network.neutron [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: eec6a484-ab00-402e-a369-c3009065c553] Refreshing network info cache for port 984474f0-e03b-413b-8c1f-8e553672a7df {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 626.942175] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598db3f1-6a17-4465-a80d-a880b2b36a0d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.964070] env[65503]: DEBUG nova.compute.provider_tree [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.063126] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449455, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.811796} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.063416] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 627.063637] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 627.063901] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8466020-a930-413f-bbc3-e89e6663567f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.072078] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 627.072078] env[65503]: value = "task-4449459" [ 627.072078] env[65503]: _type = "Task" [ 627.072078] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.085857] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449459, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.109280] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449458, 'name': CreateVM_Task, 'duration_secs': 0.540788} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.109509] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 627.110105] env[65503]: WARNING neutronclient.v2_0.client [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
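The interleaved 'Acquiring lock ... by ...', 'acquired ... :: waited' and '"released" ... :: held' records come from oslo.concurrency's lockutils, which Nova uses both as a decorator and as a context manager to serialize work such as per-instance event handling and the per-image entries under devstack-image-cache_base. A minimal sketch of both forms; the lock names and bodies are illustrative only, not the exact Nova call sites.

from oslo_concurrency import lockutils

# Decorator form: every caller of the wrapped function serializes on the
# named lock, producing "acquired by ... :: waited" / '"released" by ... ::
# held' debug records like the ones for "compute_resources" above.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # placeholder for resource-tracker style bookkeeping

# Context-manager form, as seen around a single image-cache entry so that
# only one request fetches or copies the cached VMDK at a time.
with lockutils.lock('[datastore1] devstack-image-cache_base/<image-id>'):
    pass  # placeholder for the fetch-or-reuse step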
[ 627.110597] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.110807] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.111232] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 627.111555] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5600866e-c613-4f9a-8135-243864c87efa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.119362] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 627.119362] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5298394c-3b64-45a8-eec5-5c1b0eafd6f3" [ 627.119362] env[65503]: _type = "Task" [ 627.119362] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.127635] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5298394c-3b64-45a8-eec5-5c1b0eafd6f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.331519] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d91266-ccff-aa83-5700-146dfa04abb4, 'name': SearchDatastore_Task, 'duration_secs': 0.016716} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.332597] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b390633-5a33-4135-82c4-bc85c7a91d4d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.340164] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for the task: (returnval){ [ 627.340164] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206d321-1150-a351-04e3-00cf29651224" [ 627.340164] env[65503]: _type = "Task" [ 627.340164] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.349422] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206d321-1150-a351-04e3-00cf29651224, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.375958] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43e34f2-72b8-46f3-8517-c97aeb4da5a0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.400627] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance '39266117-e82e-48ae-932a-be04b1a7351a' progress to 0 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 627.437413] env[65503]: WARNING neutronclient.v2_0.client [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
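The "Received event network-vif-plugged-..." and "network-changed-..." records handled by the req-7cf1b9e9 and req-ffc84b24 threads above originate from Neutron posting external events to Nova's os-server-external-events API once a port is bound and becomes active. Roughly the call involved, sketched here with plain requests rather than the client Neutron actually uses; the endpoint and token are placeholders, while the instance UUID and port ID are taken from the log records above.

import requests

NOVA_ENDPOINT = 'http://controller:8774/v2.1'   # placeholder Nova API endpoint
TOKEN = '<keystone-service-token>'              # placeholder token

payload = {
    'events': [{
        'name': 'network-vif-plugged',
        'server_uuid': 'a1908e71-31f9-4308-b4d6-7908d3208c5a',  # instance UUID from the log
        'tag': '939876df-2b3d-4723-8926-1187feb4fe37',          # Neutron port ID from the log
        'status': 'completed',
    }]
}

resp = requests.post(
    f'{NOVA_ENDPOINT}/os-server-external-events',
    json=payload,
    headers={'X-Auth-Token': TOKEN})
resp.raise_for_status()

The WARNING about an unexpected event for an instance in vm_state building typically just means the plug notification arrived before the driver registered a waiter for it, which is common during concurrent spawns.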
[ 627.438177] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 627.438534] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 627.467645] env[65503]: DEBUG nova.scheduler.client.report [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 627.479831] env[65503]: DEBUG nova.network.neutron [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Successfully updated port: 9f691f3d-5247-46d4-b46b-6840c2cc557d {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 627.582323] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449459, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072822} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.582488] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 627.583246] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba342e3-901b-491f-b170-c164e19d9ac8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.615600] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 627.616387] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89678ad2-d748-49c7-b1e1-5cb519ed30d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.643928] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5298394c-3b64-45a8-eec5-5c1b0eafd6f3, 'name': SearchDatastore_Task, 'duration_secs': 0.014139} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.645610] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.645845] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 627.646110] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.646288] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.647791] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 627.648064] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 627.648064] env[65503]: value = "task-4449460" [ 627.648064] env[65503]: _type = "Task" [ 627.648064] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.648277] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-338e7c71-545d-42bf-a99a-2f16fd9c0973 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.662186] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449460, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.663733] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 627.663887] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 627.664689] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a35578fb-7fac-4ddf-b906-39746b004830 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.672180] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 627.672180] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526820da-1522-9c6f-b41d-b57f6058e458" [ 627.672180] env[65503]: _type = "Task" [ 627.672180] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.681989] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526820da-1522-9c6f-b41d-b57f6058e458, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.836370] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 627.836674] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 627.855980] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206d321-1150-a351-04e3-00cf29651224, 'name': SearchDatastore_Task, 'duration_secs': 0.047754} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.856264] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.856568] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] a1908e71-31f9-4308-b4d6-7908d3208c5a/a1908e71-31f9-4308-b4d6-7908d3208c5a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 627.856795] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba337616-28bc-481b-aab0-edd4a32a4dcb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.865789] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for the task: (returnval){ [ 627.865789] env[65503]: value = "task-4449461" [ 627.865789] env[65503]: _type = "Task" [ 627.865789] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.875796] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449461, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.908792] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 627.908792] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a35bc71e-f575-4f1b-8de3-f9786749d010 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.921016] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 627.921016] env[65503]: value = "task-4449462" [ 627.921016] env[65503]: _type = "Task" [ 627.921016] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.933077] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449462, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.973642] env[65503]: DEBUG oslo_concurrency.lockutils [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.976421] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.956s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.976830] env[65503]: DEBUG nova.objects.instance [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lazy-loading 'resources' on Instance uuid c6aecf44-9a23-47a2-b1aa-6530b4119b1d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 627.983036] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "refresh_cache-1c598208-a4d0-46b8-9a9c-107353e957b9" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.983036] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquired lock "refresh_cache-1c598208-a4d0-46b8-9a9c-107353e957b9" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.983248] env[65503]: DEBUG nova.network.neutron [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 628.007426] env[65503]: INFO nova.scheduler.client.report [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Deleted allocations for instance 02b3b44e-96bb-47a0-8aa0-7026d987cad8 [ 628.167738] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449460, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.193065] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526820da-1522-9c6f-b41d-b57f6058e458, 'name': SearchDatastore_Task, 'duration_secs': 0.056353} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.193065] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eaaa1bd-d597-4a49-bd5b-2843733ba3ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.197183] env[65503]: WARNING neutronclient.v2_0.client [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 628.197183] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 628.197601] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 628.215235] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 628.215235] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5256a87f-de02-202a-75ed-a571cb0070c8" [ 628.215235] env[65503]: _type = "Task" [ 628.215235] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.229602] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5256a87f-de02-202a-75ed-a571cb0070c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.317382] env[65503]: DEBUG nova.network.neutron [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: eec6a484-ab00-402e-a369-c3009065c553] Updated VIF entry in instance network info cache for port 984474f0-e03b-413b-8c1f-8e553672a7df. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 628.317782] env[65503]: DEBUG nova.network.neutron [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: eec6a484-ab00-402e-a369-c3009065c553] Updating instance_info_cache with network_info: [{"id": "984474f0-e03b-413b-8c1f-8e553672a7df", "address": "fa:16:3e:d7:64:bb", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap984474f0-e0", "ovs_interfaceid": "984474f0-e03b-413b-8c1f-8e553672a7df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 628.383626] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449461, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.434642] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449462, 'name': PowerOffVM_Task, 'duration_secs': 0.324386} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.435091] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 628.435300] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance '39266117-e82e-48ae-932a-be04b1a7351a' progress to 17 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 628.489560] env[65503]: WARNING openstack [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 628.489560] env[65503]: WARNING openstack [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 628.523802] env[65503]: DEBUG oslo_concurrency.lockutils [None req-423fe20e-545b-4b99-ac0d-680b518f48c0 tempest-ImagesNegativeTestJSON-1486437842 tempest-ImagesNegativeTestJSON-1486437842-project-member] Lock "02b3b44e-96bb-47a0-8aa0-7026d987cad8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.999s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.552074] env[65503]: DEBUG nova.network.neutron [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 628.603309] env[65503]: WARNING openstack [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 628.603309] env[65503]: WARNING openstack [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 628.669415] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449460, 'name': ReconfigVM_Task, 'duration_secs': 0.731035} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.669808] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 628.670593] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d4785a4-ca21-4f51-875d-ea340d336ec4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.680865] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 628.680865] env[65503]: value = "task-4449463" [ 628.680865] env[65503]: _type = "Task" [ 628.680865] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.690845] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449463, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.733814] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5256a87f-de02-202a-75ed-a571cb0070c8, 'name': SearchDatastore_Task, 'duration_secs': 0.049141} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.733963] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.734708] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 2ab1cd4b-f2c0-4264-8463-8127a733a1c5/2ab1cd4b-f2c0-4264-8463-8127a733a1c5.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 628.734708] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93f96e72-08f0-418b-bbe2-de323f8fa7ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.746626] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 628.746626] env[65503]: value = "task-4449464" [ 628.746626] env[65503]: _type = "Task" [ 628.746626] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.757026] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449464, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.826101] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Releasing lock "refresh_cache-eec6a484-ab00-402e-a369-c3009065c553" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.826101] env[65503]: DEBUG nova.compute.manager [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Received event network-vif-plugged-3985c7bb-5579-4aeb-9dce-54e2716f7d60 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 628.826101] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Acquiring lock "5e2cf383-312b-404f-acff-2ecb75678600-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.826101] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Lock "5e2cf383-312b-404f-acff-2ecb75678600-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.826101] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Lock "5e2cf383-312b-404f-acff-2ecb75678600-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.826543] env[65503]: DEBUG nova.compute.manager [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] No waiting events found dispatching network-vif-plugged-3985c7bb-5579-4aeb-9dce-54e2716f7d60 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 628.826543] env[65503]: WARNING nova.compute.manager [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Received unexpected event network-vif-plugged-3985c7bb-5579-4aeb-9dce-54e2716f7d60 for instance with vm_state active and task_state None. [ 628.826543] env[65503]: DEBUG nova.compute.manager [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Received event network-changed-3985c7bb-5579-4aeb-9dce-54e2716f7d60 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 628.826543] env[65503]: DEBUG nova.compute.manager [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Refreshing instance network info cache due to event network-changed-3985c7bb-5579-4aeb-9dce-54e2716f7d60. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 628.826543] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Acquiring lock "refresh_cache-5e2cf383-312b-404f-acff-2ecb75678600" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.826730] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Acquired lock "refresh_cache-5e2cf383-312b-404f-acff-2ecb75678600" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.826730] env[65503]: DEBUG nova.network.neutron [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Refreshing network info cache for port 3985c7bb-5579-4aeb-9dce-54e2716f7d60 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 628.857548] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7a160b-9e5b-4f9c-95e2-76c6ad72707b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.866080] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f2c199-030a-4710-becc-1b16eef546e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.882099] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449461, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652392} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.910579] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] a1908e71-31f9-4308-b4d6-7908d3208c5a/a1908e71-31f9-4308-b4d6-7908d3208c5a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 628.910810] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 628.911357] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55982446-5ce5-4ccd-8cf9-27ecd160ac30 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.914587] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ed5200-cb16-41e3-b965-3a8acd43d10d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.924810] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfcbbf3-5a9d-46f2-886e-6b10dbd9d568 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.929174] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for the task: (returnval){ [ 628.929174] env[65503]: value = "task-4449465" [ 628.929174] env[65503]: _type = "Task" [ 628.929174] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.945571] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 628.945571] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 628.945571] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 628.945825] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 628.945825] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 628.945931] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 628.946365] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 628.946365] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 628.946495] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:509}} [ 628.946665] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 628.948699] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 628.952999] env[65503]: DEBUG nova.compute.provider_tree [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.955190] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9dc53a5-ff96-4306-b673-251607c85eed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.972324] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449465, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.980559] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 628.980559] env[65503]: value = "task-4449466" [ 628.980559] env[65503]: _type = "Task" [ 628.980559] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.992979] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449466, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.197451] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449463, 'name': Rename_Task, 'duration_secs': 0.33603} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.197775] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 629.198107] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67ba99df-8091-4fcc-b5dc-c2036ae03a08 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.209031] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 629.209031] env[65503]: value = "task-4449467" [ 629.209031] env[65503]: _type = "Task" [ 629.209031] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.221088] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449467, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.259703] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449464, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.330818] env[65503]: WARNING neutronclient.v2_0.client [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 629.331729] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 629.332129] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 629.446080] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449465, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074851} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.446080] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 629.446080] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a039d0c7-0a22-4baf-be76-376b53a085b0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.472790] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] a1908e71-31f9-4308-b4d6-7908d3208c5a/a1908e71-31f9-4308-b4d6-7908d3208c5a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 629.473781] env[65503]: DEBUG nova.scheduler.client.report [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 629.477212] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d00b59f-cdbb-435e-aa15-cc6576a8b28e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.509265] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449466, 'name': ReconfigVM_Task, 'duration_secs': 0.240117} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.510843] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance '39266117-e82e-48ae-932a-be04b1a7351a' progress to 33 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 629.514336] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for the task: (returnval){ [ 629.514336] env[65503]: value = "task-4449468" [ 629.514336] env[65503]: _type = "Task" [ 629.514336] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.524864] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449468, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.560469] env[65503]: WARNING neutronclient.v2_0.client [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 629.561195] env[65503]: WARNING openstack [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 629.561659] env[65503]: WARNING openstack [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 629.719988] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449467, 'name': PowerOnVM_Task} progress is 1%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.758815] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449464, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.795482} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.759137] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 2ab1cd4b-f2c0-4264-8463-8127a733a1c5/2ab1cd4b-f2c0-4264-8463-8127a733a1c5.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 629.759390] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 629.760050] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c00a6401-4a28-4c87-8a93-eff3dcbc8dfb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.767720] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 629.767720] env[65503]: value = "task-4449469" [ 629.767720] env[65503]: _type = "Task" [ 629.767720] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.778845] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449469, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.998442] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.022s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.004119] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.739s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.006019] env[65503]: INFO nova.compute.claims [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 630.016921] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 630.017270] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 630.017459] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 630.017701] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 630.017883] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 630.018071] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 630.018483] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 630.018868] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 630.019235] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 630.019498] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 630.019803] env[65503]: DEBUG nova.virt.hardware [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 630.026696] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Reconfiguring VM instance instance-00000001 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 630.027179] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff3dbf15-a992-45ad-90ce-07bbef6244fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.050891] env[65503]: INFO nova.scheduler.client.report [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Deleted allocations for instance c6aecf44-9a23-47a2-b1aa-6530b4119b1d [ 630.061075] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449468, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.063700] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 630.063700] env[65503]: value = "task-4449470" [ 630.063700] env[65503]: _type = "Task" [ 630.063700] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.076584] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449470, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.221097] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449467, 'name': PowerOnVM_Task} progress is 64%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.279284] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449469, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068037} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.279668] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 630.280662] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109b1816-d3ac-4028-bf51-f589d8a9fa3c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.304874] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 2ab1cd4b-f2c0-4264-8463-8127a733a1c5/2ab1cd4b-f2c0-4264-8463-8127a733a1c5.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.311965] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ae4d7f7-54b3-4a82-8a11-d1a28bcc6313 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.335414] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 630.335414] env[65503]: value = "task-4449471" [ 630.335414] env[65503]: _type = "Task" [ 630.335414] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.341776] env[65503]: DEBUG nova.network.neutron [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Updating instance_info_cache with network_info: [{"id": "9f691f3d-5247-46d4-b46b-6840c2cc557d", "address": "fa:16:3e:0c:86:0e", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f691f3d-52", "ovs_interfaceid": "9f691f3d-5247-46d4-b46b-6840c2cc557d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 630.350437] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449471, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.433667] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 630.433667] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 630.521628] env[65503]: WARNING neutronclient.v2_0.client [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 630.521628] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 630.521628] env[65503]: WARNING openstack [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 630.545389] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449468, 'name': ReconfigVM_Task, 'duration_secs': 0.950787} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.545834] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Reconfigured VM instance instance-0000000d to attach disk [datastore2] a1908e71-31f9-4308-b4d6-7908d3208c5a/a1908e71-31f9-4308-b4d6-7908d3208c5a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 630.546822] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4db9b87-48a1-4eba-8933-883ab8f603f4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.557259] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for the task: (returnval){ [ 630.557259] env[65503]: value = "task-4449472" [ 630.557259] env[65503]: _type = "Task" [ 630.557259] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.571319] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449472, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.571824] env[65503]: DEBUG oslo_concurrency.lockutils [None req-22704d05-6b35-4ac8-8684-fc8bf268adae tempest-ServersAaction247Test-1087918641 tempest-ServersAaction247Test-1087918641-project-member] Lock "c6aecf44-9a23-47a2-b1aa-6530b4119b1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.239s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.582818] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449470, 'name': ReconfigVM_Task, 'duration_secs': 0.325267} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.583179] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Reconfigured VM instance instance-00000001 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 630.584673] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dcf4de1-6423-4e20-a3d8-25d569ce1701 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.619692] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 39266117-e82e-48ae-932a-be04b1a7351a/39266117-e82e-48ae-932a-be04b1a7351a.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.620388] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf412799-a901-4ee8-838c-eee828ef58e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.642406] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 630.642406] env[65503]: value = "task-4449473" [ 630.642406] env[65503]: _type = "Task" [ 630.642406] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.653742] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449473, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.723411] env[65503]: DEBUG oslo_vmware.api [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449467, 'name': PowerOnVM_Task, 'duration_secs': 1.479516} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.723553] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 630.725213] env[65503]: DEBUG nova.compute.manager [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 630.725213] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87db92bc-978a-4428-915f-2833e38bc59b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.773750] env[65503]: DEBUG nova.network.neutron [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Updated VIF entry in instance network info cache for port 3985c7bb-5579-4aeb-9dce-54e2716f7d60. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 630.774261] env[65503]: DEBUG nova.network.neutron [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Updating instance_info_cache with network_info: [{"id": "3985c7bb-5579-4aeb-9dce-54e2716f7d60", "address": "fa:16:3e:95:7f:89", "network": {"id": "bf2cd38a-5089-47db-bb1c-35c17dddd3b2", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1414758544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5db0fe1dba4a49a57fac8ebceab968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3985c7bb-55", "ovs_interfaceid": "3985c7bb-5579-4aeb-9dce-54e2716f7d60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 630.844192] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Releasing lock "refresh_cache-1c598208-a4d0-46b8-9a9c-107353e957b9" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.844606] env[65503]: DEBUG nova.compute.manager [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] 
[instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Instance network_info: |[{"id": "9f691f3d-5247-46d4-b46b-6840c2cc557d", "address": "fa:16:3e:0c:86:0e", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f691f3d-52", "ovs_interfaceid": "9f691f3d-5247-46d4-b46b-6840c2cc557d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 630.845114] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:86:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f691f3d-5247-46d4-b46b-6840c2cc557d', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 630.857195] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 630.861627] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 630.861933] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449471, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.862162] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64b53fb2-0750-4cd5-bdf5-86391065d11f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.885276] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 630.885276] env[65503]: value = "task-4449474" [ 630.885276] env[65503]: _type = "Task" [ 630.885276] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.896468] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449474, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.069532] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449472, 'name': Rename_Task, 'duration_secs': 0.293453} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.070015] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 631.070538] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a573c2a7-1e93-423e-a8dd-97069c0132b5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.079569] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for the task: (returnval){ [ 631.079569] env[65503]: value = "task-4449475" [ 631.079569] env[65503]: _type = "Task" [ 631.079569] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.096106] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449475, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.159979] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449473, 'name': ReconfigVM_Task, 'duration_secs': 0.499376} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.160793] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 39266117-e82e-48ae-932a-be04b1a7351a/39266117-e82e-48ae-932a-be04b1a7351a.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 631.160991] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance '39266117-e82e-48ae-932a-be04b1a7351a' progress to 50 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 631.183399] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquiring lock "16d508f9-72f0-4853-92fb-c8c7a37b5668" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.184420] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Lock "16d508f9-72f0-4853-92fb-c8c7a37b5668" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.252865] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.280569] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffc84b24-6b8a-40f2-b3b3-eeb5b368a259 req-e2e448bc-b695-4748-b04e-2ed15a03495f service nova] Releasing lock "refresh_cache-5e2cf383-312b-404f-acff-2ecb75678600" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.347294] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449471, 'name': ReconfigVM_Task, 'duration_secs': 0.567194} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.351292] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 2ab1cd4b-f2c0-4264-8463-8127a733a1c5/2ab1cd4b-f2c0-4264-8463-8127a733a1c5.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 631.353369] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98bfb482-13aa-491a-8c1a-df19c0665723 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.361799] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 631.361799] env[65503]: value = "task-4449476" [ 631.361799] env[65503]: _type = "Task" [ 631.361799] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.371951] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449476, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.399288] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449474, 'name': CreateVM_Task, 'duration_secs': 0.375501} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.400044] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 631.400173] env[65503]: WARNING neutronclient.v2_0.client [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
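The Rename_Task, PowerOnVM_Task and CreateVM_Task entries above all follow the same oslo.vmware pattern: a vCenter task is started over the SOAP API and then polled until it finishes, which is what produces the repeated "Task: {...} progress is N%" lines from _poll_task. A minimal sketch of that pattern, not taken from Nova's code; the endpoint, credentials and tuning values below are placeholders, and the VM picked is simply the first one returned.

from oslo_vmware import api, vim_util

# Placeholder vCenter endpoint and credentials -- assumptions, not values from this log.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# List up to 100 VirtualMachine refs with their names; this is the same
# PropertyCollector.RetrievePropertiesEx call that dominates the DEBUG output above.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100, ['name'])
vm_ref = result.objects[0].obj

# Start a vCenter task and block until it completes; wait_for_task() polls the
# task object (the "progress is N%" lines) and raises if vCenter reports an error.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task_ref)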
[ 631.400794] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.400794] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.401018] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 631.401361] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e50886dc-adf0-4113-af94-b6f01cfe0a71 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.407730] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 631.407730] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b328f2-c4ac-bb09-5569-fadafebfcfca" [ 631.407730] env[65503]: _type = "Task" [ 631.407730] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.417825] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b328f2-c4ac-bb09-5569-fadafebfcfca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.421804] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4175bc4a-667d-4485-92c9-5e26d44c62c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.430262] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7a7421-aeff-4c20-84c3-010eadaac7da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.471256] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32458aa5-4e8f-4e1e-b77d-0b9968940da4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.478965] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9408e274-7353-467d-9bdf-e1e8a824784e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.494024] env[65503]: DEBUG nova.compute.provider_tree [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.592041] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449475, 'name': PowerOnVM_Task} progress is 1%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.669965] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd01c018-d1d0-4749-ba44-41c8ce1084e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.706195] env[65503]: DEBUG nova.compute.manager [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 631.712684] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f15eb95-2820-44b8-8a9a-12ff818fb8f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.742226] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance '39266117-e82e-48ae-932a-be04b1a7351a' progress to 67 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 631.876060] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449476, 'name': Rename_Task, 'duration_secs': 0.20766} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.876489] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 631.876815] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0255d0c-b52d-440f-ab80-57b110e6ae9e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.885850] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 631.885850] env[65503]: value = "task-4449477" [ 631.885850] env[65503]: _type = "Task" [ 631.885850] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.906078] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449477, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.921307] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b328f2-c4ac-bb09-5569-fadafebfcfca, 'name': SearchDatastore_Task, 'duration_secs': 0.018427} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.921764] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.923365] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 631.923623] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.924209] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.924209] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 631.925062] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69b9a24a-c01e-4a0c-b737-56b8ae528188 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.937124] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 631.937124] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 631.937297] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-089a4015-191b-40a5-a6dc-2fdb075baca4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.947724] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 631.947724] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa03d8-adbc-de6b-fe0f-c18dc6d94354" [ 631.947724] env[65503]: _type = "Task" [ 631.947724] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.956531] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa03d8-adbc-de6b-fe0f-c18dc6d94354, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.998587] env[65503]: DEBUG nova.scheduler.client.report [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 632.101360] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449475, 'name': PowerOnVM_Task} progress is 91%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.246447] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.250805] env[65503]: WARNING neutronclient.v2_0.client [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
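The "Inventory has not changed for provider 988ff85a-..." report a few entries above carries the data the resource tracker pushes to placement. What placement can actually schedule from it is derived per resource class as (total - reserved) * allocation_ratio; a quick check of the numbers reported here, as a small sketch:

# Inventory as reported above for provider 988ff85a-1d12-41bb-a369-e298e8491ca1
# (only the fields needed for the capacity calculation are kept).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, usable)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0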
[ 632.278860] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "24e054d7-7662-47ef-8f69-4738c5ff9548" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.280260] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "24e054d7-7662-47ef-8f69-4738c5ff9548" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.295657] env[65503]: DEBUG nova.network.neutron [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Port 03697e78-5935-45aa-a1d5-1bf8701e3f56 binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 632.397592] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449477, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.463380] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa03d8-adbc-de6b-fe0f-c18dc6d94354, 'name': SearchDatastore_Task, 'duration_secs': 0.012175} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.464291] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64f89f04-cf86-4409-b622-b12fd878c724 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.470716] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 632.470716] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b1dcfc-3a36-a429-40ba-6bbf2aaeeecd" [ 632.470716] env[65503]: _type = "Task" [ 632.470716] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.480888] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b1dcfc-3a36-a429-40ba-6bbf2aaeeecd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.505698] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.502s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.506547] env[65503]: DEBUG nova.compute.manager [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 632.509228] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.145s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.512776] env[65503]: INFO nova.compute.claims [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 632.592187] env[65503]: DEBUG nova.compute.manager [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Received event network-changed-939876df-2b3d-4723-8926-1187feb4fe37 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 632.592187] env[65503]: DEBUG nova.compute.manager [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Refreshing instance network info cache due to event network-changed-939876df-2b3d-4723-8926-1187feb4fe37. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 632.592187] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Acquiring lock "refresh_cache-a1908e71-31f9-4308-b4d6-7908d3208c5a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.594539] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Acquired lock "refresh_cache-a1908e71-31f9-4308-b4d6-7908d3208c5a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.594539] env[65503]: DEBUG nova.network.neutron [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Refreshing network info cache for port 939876df-2b3d-4723-8926-1187feb4fe37 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 632.602370] env[65503]: DEBUG oslo_vmware.api [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449475, 'name': PowerOnVM_Task, 'duration_secs': 1.042395} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.602370] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 632.602370] env[65503]: INFO nova.compute.manager [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Took 14.27 seconds to spawn the instance on the hypervisor. [ 632.602370] env[65503]: DEBUG nova.compute.manager [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 632.602370] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3189297c-8ee7-490f-bcba-12ea80064f4a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.787240] env[65503]: DEBUG nova.compute.manager [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 632.825828] env[65503]: DEBUG nova.compute.manager [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Received event network-vif-plugged-3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 632.826779] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Acquiring lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.827040] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.827219] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.827410] env[65503]: DEBUG nova.compute.manager [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] No waiting events found dispatching network-vif-plugged-3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 632.827580] env[65503]: WARNING nova.compute.manager [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Received unexpected event network-vif-plugged-3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3 for instance with vm_state building and task_state spawning. [ 632.827717] env[65503]: DEBUG nova.compute.manager [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Received event network-changed-3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 632.827896] env[65503]: DEBUG nova.compute.manager [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Refreshing instance network info cache due to event network-changed-3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 632.828091] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Acquiring lock "refresh_cache-2ab1cd4b-f2c0-4264-8463-8127a733a1c5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.828218] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Acquired lock "refresh_cache-2ab1cd4b-f2c0-4264-8463-8127a733a1c5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.828366] env[65503]: DEBUG nova.network.neutron [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Refreshing network info cache for port 3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 632.881256] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "9dbaff4f-ab02-481b-b51f-b134021d277c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.881679] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "9dbaff4f-ab02-481b-b51f-b134021d277c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.900637] env[65503]: DEBUG oslo_vmware.api [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449477, 'name': PowerOnVM_Task, 'duration_secs': 0.615662} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.901508] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 632.901785] env[65503]: INFO nova.compute.manager [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Took 12.03 seconds to spawn the instance on the hypervisor. 
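The lock traffic that runs through this log ("Acquiring lock ... by ...", "acquired ... waited Ns", '"released" ... held Ns') is oslo.concurrency's lockutils doing the logging: the two times reported are how long the caller waited for the lock and how long it held it. A sketch of the two forms that produce those lines; the lock names match the log, but the function body is illustrative only.

from oslo_concurrency import lockutils

# Decorator form: lockutils emits the "Acquiring lock", "acquired ... waited Ns"
# and '"released" ... held Ns' DEBUG messages around every call.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Critical section: only one thread in this worker mutates tracker state.
    return instance_uuid

claim_resources('9dbaff4f-ab02-481b-b51f-b134021d277c')

# Equivalent context-manager form for ad-hoc critical sections, as used for the
# refresh_cache-<uuid> locks above.
with lockutils.lock('refresh_cache-a1908e71-31f9-4308-b4d6-7908d3208c5a'):
    pass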
[ 632.901865] env[65503]: DEBUG nova.compute.manager [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 632.902694] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f4917f-d65f-48b1-a3c0-c488985be817 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.912591] env[65503]: WARNING oslo_messaging._drivers.amqpdriver [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 632.983822] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b1dcfc-3a36-a429-40ba-6bbf2aaeeecd, 'name': SearchDatastore_Task, 'duration_secs': 0.02695} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.985008] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.985176] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 1c598208-a4d0-46b8-9a9c-107353e957b9/1c598208-a4d0-46b8-9a9c-107353e957b9.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 632.985548] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa29e2e9-d2e2-4b2e-993f-ef5c17239e7b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.995089] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 632.995089] env[65503]: value = "task-4449478" [ 632.995089] env[65503]: _type = "Task" [ 632.995089] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.009736] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449478, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.021284] env[65503]: DEBUG nova.compute.utils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 633.023167] env[65503]: DEBUG nova.compute.manager [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 633.023559] env[65503]: DEBUG nova.network.neutron [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 633.024194] env[65503]: WARNING neutronclient.v2_0.client [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 633.024955] env[65503]: WARNING neutronclient.v2_0.client [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 633.025640] env[65503]: WARNING openstack [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 633.026232] env[65503]: WARNING openstack [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 633.097064] env[65503]: WARNING neutronclient.v2_0.client [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
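The repeated neutronclient WARNINGs above ("deprecated in favor of OpenstackSDK") point at openstacksdk as the replacement for these port lookups. A minimal equivalent of the network-info refresh being done here for port 939876df-...; the cloud name 'devstack' is an assumption about clouds.yaml, not something this log shows.

import openstack

# Credentials and endpoints come from clouds.yaml / environment; 'devstack' is a placeholder.
conn = openstack.connect(cloud='devstack')

# Fetch the changed port, roughly what the network info cache refresh needs.
port = conn.network.get_port('939876df-2b3d-4723-8926-1187feb4fe37')
print(port.status, port.fixed_ips, port.binding_host_id)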
[ 633.097789] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 633.098165] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 633.130008] env[65503]: INFO nova.compute.manager [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Took 27.28 seconds to build instance. [ 633.240204] env[65503]: DEBUG nova.policy [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2d5194dd2bd49cda00feb316dd08ae4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c024f22a228f4d2faa4b9316ca53a1ea', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 633.318422] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquiring lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.318796] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.319116] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquiring lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.320512] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.320512] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.323915] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.330473] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "39266117-e82e-48ae-932a-be04b1a7351a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.330724] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "39266117-e82e-48ae-932a-be04b1a7351a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.330884] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "39266117-e82e-48ae-932a-be04b1a7351a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.332736] env[65503]: INFO nova.compute.manager [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Terminating instance [ 633.334188] env[65503]: WARNING neutronclient.v2_0.client [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 633.334945] env[65503]: WARNING openstack [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 633.335826] env[65503]: WARNING openstack [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 633.386099] env[65503]: DEBUG nova.compute.manager [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 633.425140] env[65503]: INFO nova.compute.manager [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Took 26.96 seconds to build instance. [ 633.507889] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449478, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.538045] env[65503]: DEBUG nova.compute.manager [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 633.632303] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98a4b9df-ca08-4e80-81b7-6a80a73753bf tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Lock "a1908e71-31f9-4308-b4d6-7908d3208c5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.805s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.658539] env[65503]: DEBUG nova.compute.manager [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 633.659529] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049886bf-f570-44fe-a353-2f0b53ed3399 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.849679] env[65503]: DEBUG nova.compute.manager [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 633.849863] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 633.850803] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0796dd-8433-4f53-8784-047b060b8f8b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.864888] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 633.864888] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c405ab2-906c-4903-ab1d-172f563f031a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.876405] env[65503]: DEBUG oslo_vmware.api [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 633.876405] env[65503]: value = "task-4449479" [ 633.876405] env[65503]: _type = "Task" [ 633.876405] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.886352] env[65503]: DEBUG oslo_vmware.api [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449479, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.915293] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.929844] env[65503]: DEBUG oslo_concurrency.lockutils [None req-40fc23ff-008f-4c07-b977-ca1ed0b514e1 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.480s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.972224] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea1f274-4e40-4d3d-886f-6c64f75cf5e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.983824] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cf8fea-d1eb-470c-9712-d6a46713adab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.023825] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692a42ab-9265-41ef-abe2-d683ce0def15 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.036575] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927245a5-3289-4127-8c4c-453e689ed4a0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.040839] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449478, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.587274} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.041136] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 1c598208-a4d0-46b8-9a9c-107353e957b9/1c598208-a4d0-46b8-9a9c-107353e957b9.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 634.041387] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 634.042889] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1c7ffba-d51d-450d-b31c-a45e105cf033 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.057404] env[65503]: DEBUG nova.compute.provider_tree [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.065377] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 634.065377] env[65503]: value = "task-4449480" [ 634.065377] env[65503]: _type = "Task" [ 634.065377] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.077768] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449480, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.176311] env[65503]: INFO nova.compute.manager [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] instance snapshotting [ 634.179604] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6c1cea-475d-4834-879a-76a9210c64b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.200312] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4397f7f-78e5-4549-afa2-5368a6bc7d3a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.337292] env[65503]: WARNING neutronclient.v2_0.client [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 634.387790] env[65503]: DEBUG oslo_vmware.api [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449479, 'name': PowerOffVM_Task, 'duration_secs': 0.427058} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.388085] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 634.388272] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 634.388501] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf74c93e-5f42-464e-97da-30fab049b2e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.431022] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 634.431022] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 634.467113] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None 
req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 634.467113] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 634.467113] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Deleting the datastore file [datastore1] 2dbc3860-c65c-4cbb-8d90-f1f74420e652 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 634.471913] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89e46730-7d06-404c-acde-18e4704aa685 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.481171] env[65503]: DEBUG oslo_vmware.api [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for the task: (returnval){ [ 634.481171] env[65503]: value = "task-4449482" [ 634.481171] env[65503]: _type = "Task" [ 634.481171] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.489531] env[65503]: DEBUG oslo_vmware.api [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449482, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.522667] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquiring lock "ab09cfe5-8257-462b-9ebf-87081d5793ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.522667] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Lock "ab09cfe5-8257-462b-9ebf-87081d5793ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.563308] env[65503]: DEBUG nova.compute.manager [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 634.564961] env[65503]: DEBUG nova.scheduler.client.report [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 634.583681] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449480, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115977} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.584026] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 634.584954] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beec53c9-4db7-460d-9423-f001d84543f1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.611432] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 1c598208-a4d0-46b8-9a9c-107353e957b9/1c598208-a4d0-46b8-9a9c-107353e957b9.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 634.615035] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3991f3f4-016d-4b22-bbef-67c092423fbe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.639316] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 634.639316] env[65503]: value = "task-4449483" [ 634.639316] env[65503]: _type = "Task" [ 634.639316] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.644325] env[65503]: DEBUG nova.virt.hardware [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:47:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1713654835',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1999222197',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 634.644637] env[65503]: DEBUG nova.virt.hardware [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 634.644827] env[65503]: DEBUG nova.virt.hardware [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 634.645061] env[65503]: DEBUG nova.virt.hardware [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 634.645243] env[65503]: DEBUG nova.virt.hardware [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 634.645421] env[65503]: DEBUG nova.virt.hardware [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 634.645666] env[65503]: DEBUG nova.virt.hardware [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 634.645856] env[65503]: DEBUG nova.virt.hardware [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 
tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 634.646062] env[65503]: DEBUG nova.virt.hardware [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 634.648616] env[65503]: DEBUG nova.virt.hardware [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 634.648616] env[65503]: DEBUG nova.virt.hardware [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 634.648616] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbf5a9f-0603-478b-93b4-0093be5fc4c4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.666862] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de67cbfc-0d6f-4f8c-9908-1c26bc3a9f58 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.674656] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449483, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.697592] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.697763] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.697937] env[65503]: DEBUG nova.network.neutron [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 634.713057] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 634.713598] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1811f6dd-0f76-4773-9bd8-6feb12a7b0c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.724028] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 634.724028] env[65503]: value = "task-4449484" [ 634.724028] env[65503]: _type = "Task" [ 634.724028] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.735886] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449484, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.819128] env[65503]: DEBUG nova.network.neutron [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Successfully created port: ea1023c2-b650-4073-a973-29291b753f53 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 635.000037] env[65503]: DEBUG oslo_vmware.api [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Task: {'id': task-4449482, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.417833} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.000990] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 635.000990] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 635.000990] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 635.001412] env[65503]: INFO nova.compute.manager [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Took 1.15 seconds to destroy the instance on the hypervisor. [ 635.001698] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 635.001921] env[65503]: DEBUG nova.compute.manager [-] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 635.002108] env[65503]: DEBUG nova.network.neutron [-] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 635.002287] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 635.003044] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 635.003348] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 635.025403] env[65503]: DEBUG nova.compute.manager [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 635.079017] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.079926] env[65503]: DEBUG nova.compute.manager [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 635.083941] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.745s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.086925] env[65503]: INFO nova.compute.claims [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.154637] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449483, 'name': ReconfigVM_Task, 'duration_secs': 0.443099} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.154994] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 1c598208-a4d0-46b8-9a9c-107353e957b9/1c598208-a4d0-46b8-9a9c-107353e957b9.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 635.155671] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec1b6a6e-1499-49ed-89c6-bc39f649e3a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.164918] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 635.164918] env[65503]: value = "task-4449485" [ 635.164918] env[65503]: _type = "Task" [ 635.164918] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.176497] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449485, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.203071] env[65503]: WARNING neutronclient.v2_0.client [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 635.203761] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 635.205224] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 635.241273] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449484, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.560504] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.593232] env[65503]: DEBUG nova.compute.utils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 635.596710] env[65503]: DEBUG nova.compute.manager [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 635.596954] env[65503]: DEBUG nova.network.neutron [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 635.597326] env[65503]: WARNING neutronclient.v2_0.client [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 635.597686] env[65503]: WARNING neutronclient.v2_0.client [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 635.598314] env[65503]: WARNING openstack [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 635.598916] env[65503]: WARNING openstack [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 635.678172] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449485, 'name': Rename_Task, 'duration_secs': 0.17827} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.679335] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 635.679671] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f7e3a36-4098-4791-93fb-da9fe208c92b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.687418] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 635.687418] env[65503]: value = "task-4449486" [ 635.687418] env[65503]: _type = "Task" [ 635.687418] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.697671] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449486, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.736811] env[65503]: WARNING openstack [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 635.737488] env[65503]: WARNING openstack [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 635.748918] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449484, 'name': CreateSnapshot_Task, 'duration_secs': 0.62558} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.753029] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 635.753029] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9620f9c-0827-4ecb-aa43-23e0b2f4765c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.926030] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 636.021799] env[65503]: DEBUG nova.policy [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2881f5a1d64b477b947a108e381951ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a840b7f7e86745d79af6e9e1196e20fa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 636.051929] env[65503]: WARNING neutronclient.v2_0.client [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 636.052655] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 636.053025] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 636.098915] env[65503]: DEBUG nova.compute.manager [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 636.201842] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449486, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.275441] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 636.278509] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4844dfb5-6713-403f-b8b5-53c56a12ad76 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.291911] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 636.291911] env[65503]: value = "task-4449487" [ 636.291911] env[65503]: _type = "Task" [ 636.291911] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.304057] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449487, 'name': CloneVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.491898] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3af596-1922-46a5-a934-180dc56933d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.501455] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3d6788-1a3b-4f46-94d1-71082b1582bd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.540739] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495415d7-c2a1-453d-afac-c471380abd95 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.548964] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c1f01b-b981-4d53-95ad-88bc7107e2da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.570058] env[65503]: DEBUG nova.compute.provider_tree [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.709030] env[65503]: DEBUG oslo_vmware.api [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449486, 'name': PowerOnVM_Task, 'duration_secs': 0.941065} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.709456] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 636.709602] env[65503]: INFO nova.compute.manager [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Took 10.80 seconds to spawn the instance on the hypervisor. [ 636.709897] env[65503]: DEBUG nova.compute.manager [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 636.711159] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e052d744-7384-4039-aec9-ad79c99e24cc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.804822] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449487, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.074313] env[65503]: DEBUG nova.scheduler.client.report [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 637.083594] env[65503]: DEBUG nova.network.neutron [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Successfully created port: 88cbb4cd-7967-47a9-9cda-a3456020aefa {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 637.110436] env[65503]: DEBUG nova.compute.manager [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 637.147548] env[65503]: DEBUG nova.virt.hardware [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 637.147800] env[65503]: DEBUG nova.virt.hardware [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 637.147996] env[65503]: DEBUG nova.virt.hardware [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 637.148116] env[65503]: DEBUG nova.virt.hardware [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 637.148245] env[65503]: DEBUG nova.virt.hardware [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 637.148398] env[65503]: DEBUG nova.virt.hardware [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 637.148596] env[65503]: DEBUG nova.virt.hardware [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 637.148743] env[65503]: DEBUG nova.virt.hardware [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 637.148898] env[65503]: DEBUG 
nova.virt.hardware [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 637.149058] env[65503]: DEBUG nova.virt.hardware [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 637.149249] env[65503]: DEBUG nova.virt.hardware [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 637.150307] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5be7203-503d-45b6-bc37-203d11c2b406 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.159342] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae45b74-cc91-404f-a78c-ed6b70e6b1f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.165544] env[65503]: WARNING neutronclient.v2_0.client [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 637.166632] env[65503]: WARNING openstack [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 637.166632] env[65503]: WARNING openstack [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 637.185744] env[65503]: DEBUG nova.network.neutron [-] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 637.248090] env[65503]: INFO nova.compute.manager [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Took 27.97 seconds to build instance. [ 637.306730] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449487, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.400056] env[65503]: DEBUG nova.network.neutron [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Successfully updated port: ea1023c2-b650-4073-a973-29291b753f53 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 637.433678] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 637.434106] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 637.583400] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.583956] env[65503]: DEBUG nova.compute.manager [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 637.591985] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.098s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.592286] env[65503]: INFO nova.compute.claims [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.691376] env[65503]: INFO nova.compute.manager [-] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Took 2.69 seconds to deallocate network for instance. 
[ 637.752607] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ef8baa5-647b-45e3-848c-0a86a1bfccdb tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "1c598208-a4d0-46b8-9a9c-107353e957b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.483s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.806392] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449487, 'name': CloneVM_Task, 'duration_secs': 1.467731} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.806972] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Created linked-clone VM from snapshot [ 637.808619] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f29efb-0849-4187-abdf-c6cc1504c42a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.820151] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Uploading image f4a072a1-0a6b-43fe-9278-cc47fc5f2852 {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 637.855188] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 637.855188] env[65503]: value = "vm-870235" [ 637.855188] env[65503]: _type = "VirtualMachine" [ 637.855188] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 637.855610] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-32c1a6b9-1868-4be2-8d52-a91b1af6c0f1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.865381] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lease: (returnval){ [ 637.865381] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bbde6c-8416-b69c-6330-0d52622ccc9a" [ 637.865381] env[65503]: _type = "HttpNfcLease" [ 637.865381] env[65503]: } obtained for exporting VM: (result){ [ 637.865381] env[65503]: value = "vm-870235" [ 637.865381] env[65503]: _type = "VirtualMachine" [ 637.865381] env[65503]: }. 
{{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 637.865678] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the lease: (returnval){ [ 637.865678] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bbde6c-8416-b69c-6330-0d52622ccc9a" [ 637.865678] env[65503]: _type = "HttpNfcLease" [ 637.865678] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 637.878305] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 637.878305] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bbde6c-8416-b69c-6330-0d52622ccc9a" [ 637.878305] env[65503]: _type = "HttpNfcLease" [ 637.878305] env[65503]: } is initializing. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 637.884759] env[65503]: DEBUG nova.network.neutron [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Updated VIF entry in instance network info cache for port 939876df-2b3d-4723-8926-1187feb4fe37. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 637.885251] env[65503]: DEBUG nova.network.neutron [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Updating instance_info_cache with network_info: [{"id": "939876df-2b3d-4723-8926-1187feb4fe37", "address": "fa:16:3e:ac:ad:63", "network": {"id": "8a095ab6-7707-4d88-81ca-9d7588aec7de", "bridge": "br-int", "label": "tempest-ServersTestJSON-408175709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f55d9e8eeb34665aaa465057871d687", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26a1e556-1ede-445a-bf87-a35daa5d9070", "external-id": "nsx-vlan-transportzone-396", "segmentation_id": 396, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap939876df-2b", "ovs_interfaceid": "939876df-2b3d-4723-8926-1187feb4fe37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 637.902929] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "refresh_cache-9297d849-a966-48da-ba6a-453c42b99e44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.903194] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 
tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquired lock "refresh_cache-9297d849-a966-48da-ba6a-453c42b99e44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.903367] env[65503]: DEBUG nova.network.neutron [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 638.099717] env[65503]: DEBUG nova.compute.utils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 638.105788] env[65503]: DEBUG nova.compute.manager [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 638.105788] env[65503]: DEBUG nova.network.neutron [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 638.105788] env[65503]: WARNING neutronclient.v2_0.client [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 638.105788] env[65503]: WARNING neutronclient.v2_0.client [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 638.106185] env[65503]: WARNING openstack [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 638.106530] env[65503]: WARNING openstack [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 638.200113] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.376360] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 638.376360] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bbde6c-8416-b69c-6330-0d52622ccc9a" [ 638.376360] env[65503]: _type = "HttpNfcLease" [ 638.376360] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 638.376826] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 638.376826] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bbde6c-8416-b69c-6330-0d52622ccc9a" [ 638.376826] env[65503]: _type = "HttpNfcLease" [ 638.376826] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 638.377490] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905b93cf-9637-40e0-a59e-d868e35053f4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.387846] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Releasing lock "refresh_cache-a1908e71-31f9-4308-b4d6-7908d3208c5a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.388323] env[65503]: DEBUG nova.compute.manager [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Received event network-changed-eb97a8c5-41a9-42ff-80fe-382fbcdc440a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 638.388323] env[65503]: DEBUG nova.compute.manager [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Refreshing instance network info cache due to event network-changed-eb97a8c5-41a9-42ff-80fe-382fbcdc440a. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 638.388673] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Acquiring lock "refresh_cache-f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.388673] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Acquired lock "refresh_cache-f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.388801] env[65503]: DEBUG nova.network.neutron [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Refreshing network info cache for port eb97a8c5-41a9-42ff-80fe-382fbcdc440a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 638.392964] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e74cf6-d4f8-5a93-641e-f548bed32c94/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 638.393138] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e74cf6-d4f8-5a93-641e-f548bed32c94/disk-0.vmdk for reading. {{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 638.396772] env[65503]: DEBUG nova.network.neutron [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Updated VIF entry in instance network info cache for port 3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 638.397133] env[65503]: DEBUG nova.network.neutron [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Updating instance_info_cache with network_info: [{"id": "3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3", "address": "fa:16:3e:c4:be:5e", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cc93e6f-5b", "ovs_interfaceid": "3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 638.458633] env[65503]: WARNING openstack [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 638.459025] env[65503]: WARNING openstack [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 638.473025] env[65503]: DEBUG nova.policy [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55db708d2a9b47baa25cafed2be1ba91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '463e93d05e1e4b27a3dc866a5b1991d0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 638.532387] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c3906f3c-3d5e-46c3-92f9-320253b81321 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
638.605765] env[65503]: DEBUG nova.compute.manager [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 638.736537] env[65503]: WARNING neutronclient.v2_0.client [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 638.737496] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 638.737590] env[65503]: WARNING openstack [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 638.802774] env[65503]: DEBUG nova.network.neutron [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 638.900772] env[65503]: WARNING neutronclient.v2_0.client [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 638.901809] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 638.902169] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 638.913029] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd2905cd-bbaa-431c-bf60-e3eb67a158a2 req-c6b84121-af42-4865-b146-31688a23ad88 service nova] Releasing lock "refresh_cache-2ab1cd4b-f2c0-4264-8463-8127a733a1c5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.070534] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2c37f6-9fac-46b0-a4d7-1ffa3f51fa0e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.084680] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930ab312-c587-4657-9c4b-82e3117ac8b5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.102874] env[65503]: DEBUG nova.network.neutron [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Successfully updated port: 88cbb4cd-7967-47a9-9cda-a3456020aefa {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 639.155402] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745c050e-621d-4832-a2d4-084333ea18d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.166478] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fcf72f6-dece-4eda-a6dd-ab57a5fe5412 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.188931] env[65503]: DEBUG nova.compute.provider_tree [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.200645] env[65503]: DEBUG nova.network.neutron [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Successfully created port: 831f4b30-3858-4674-b6f1-f7f00241ca20 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 639.216641] env[65503]: WARNING openstack [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b 
tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 639.217142] env[65503]: WARNING openstack [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 639.269424] env[65503]: DEBUG nova.network.neutron [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance_info_cache with network_info: [{"id": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "address": "fa:16:3e:34:a3:37", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03697e78-59", "ovs_interfaceid": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 639.374962] env[65503]: WARNING neutronclient.v2_0.client [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 639.376264] env[65503]: WARNING openstack [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 639.376264] env[65503]: WARNING openstack [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 639.455850] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 639.458907] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 639.608063] env[65503]: INFO nova.compute.manager [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Rebuilding instance [ 639.612011] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquiring lock "refresh_cache-85d0ed1d-6306-4999-832b-f4e69233fec7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.612011] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquired lock "refresh_cache-85d0ed1d-6306-4999-832b-f4e69233fec7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.612240] env[65503]: DEBUG nova.network.neutron [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 639.629846] env[65503]: DEBUG nova.network.neutron [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Updating instance_info_cache with network_info: [{"id": 
"ea1023c2-b650-4073-a973-29291b753f53", "address": "fa:16:3e:dc:11:e9", "network": {"id": "f74e7fa9-c4c6-43e3-bb39-f2f843b914b5", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1457325106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c024f22a228f4d2faa4b9316ca53a1ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea1023c2-b6", "ovs_interfaceid": "ea1023c2-b650-4073-a973-29291b753f53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 639.659906] env[65503]: DEBUG nova.compute.manager [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 639.664214] env[65503]: WARNING neutronclient.v2_0.client [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 639.664214] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 639.666177] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 639.689929] env[65503]: DEBUG nova.virt.hardware [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 639.689929] env[65503]: DEBUG nova.virt.hardware [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 639.690249] env[65503]: DEBUG nova.virt.hardware [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 639.690249] env[65503]: DEBUG nova.virt.hardware [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 639.690338] env[65503]: DEBUG nova.virt.hardware [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 639.690452] env[65503]: DEBUG nova.virt.hardware [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 639.690655] env[65503]: DEBUG nova.virt.hardware [None 
req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 639.690825] env[65503]: DEBUG nova.virt.hardware [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 639.690993] env[65503]: DEBUG nova.virt.hardware [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 639.691155] env[65503]: DEBUG nova.virt.hardware [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 639.691317] env[65503]: DEBUG nova.virt.hardware [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 639.692313] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc021105-cab4-4c17-a59a-94704659e8ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.700655] env[65503]: DEBUG nova.scheduler.client.report [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 639.711614] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4fff8c-b259-45ae-a2af-5348e1ad0bba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.717029] env[65503]: DEBUG nova.compute.manager [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 639.718405] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56fc624-fd24-420c-b8a5-ec1e4cd5a5c6 {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.773722] env[65503]: DEBUG oslo_concurrency.lockutils [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.810120] env[65503]: DEBUG nova.network.neutron [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Updated VIF entry in instance network info cache for port eb97a8c5-41a9-42ff-80fe-382fbcdc440a. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 639.810623] env[65503]: DEBUG nova.network.neutron [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Updating instance_info_cache with network_info: [{"id": "eb97a8c5-41a9-42ff-80fe-382fbcdc440a", "address": "fa:16:3e:0d:d9:4c", "network": {"id": "cb51b25b-9fc3-434c-8e76-3c58b174b65d", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2062279965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c519075bc624e7b90915354752765da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb97a8c5-41", "ovs_interfaceid": "eb97a8c5-41a9-42ff-80fe-382fbcdc440a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 640.059936] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "8c274097-234a-44be-9159-c2fb0f1a8da1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.060709] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "8c274097-234a-44be-9159-c2fb0f1a8da1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.119959] env[65503]: WARNING openstack [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 
tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 640.121032] env[65503]: WARNING openstack [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 640.139189] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Releasing lock "refresh_cache-9297d849-a966-48da-ba6a-453c42b99e44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.139601] env[65503]: DEBUG nova.compute.manager [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Instance network_info: |[{"id": "ea1023c2-b650-4073-a973-29291b753f53", "address": "fa:16:3e:dc:11:e9", "network": {"id": "f74e7fa9-c4c6-43e3-bb39-f2f843b914b5", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1457325106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c024f22a228f4d2faa4b9316ca53a1ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea1023c2-b6", "ovs_interfaceid": "ea1023c2-b650-4073-a973-29291b753f53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 640.140075] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:11:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea1023c2-b650-4073-a973-29291b753f53', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.148043] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Creating folder: Project (c024f22a228f4d2faa4b9316ca53a1ea). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.148564] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e825b0e2-750f-41eb-bb30-a0690267c0a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.162153] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Created folder: Project (c024f22a228f4d2faa4b9316ca53a1ea) in parent group-v870190. [ 640.162440] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Creating folder: Instances. Parent ref: group-v870236. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.162733] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2dd068c7-08ba-4e9c-9ee2-86ac2e7f1e25 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.176771] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Created folder: Instances in parent group-v870236. [ 640.177056] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 640.178539] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.178539] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-102f1bd0-4fe4-434e-b174-f6cb428eb72a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.201924] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.201924] env[65503]: value = "task-4449491" [ 640.201924] env[65503]: _type = "Task" [ 640.201924] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.212407] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.624s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.213114] env[65503]: DEBUG nova.compute.manager [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 640.216047] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449491, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.216699] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 8.964s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.216940] env[65503]: DEBUG nova.objects.instance [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 640.253857] env[65503]: DEBUG nova.network.neutron [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 640.316823] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Releasing lock "refresh_cache-f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.317121] env[65503]: DEBUG nova.compute.manager [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Received event network-vif-plugged-9f691f3d-5247-46d4-b46b-6840c2cc557d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 640.317594] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Acquiring lock "1c598208-a4d0-46b8-9a9c-107353e957b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.317871] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Lock "1c598208-a4d0-46b8-9a9c-107353e957b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.318065] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Lock "1c598208-a4d0-46b8-9a9c-107353e957b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.318437] env[65503]: DEBUG nova.compute.manager [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] No waiting events found dispatching network-vif-plugged-9f691f3d-5247-46d4-b46b-6840c2cc557d {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 640.318614] env[65503]: WARNING nova.compute.manager [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Received unexpected event network-vif-plugged-9f691f3d-5247-46d4-b46b-6840c2cc557d for instance with vm_state building and task_state spawning. [ 640.318849] env[65503]: DEBUG nova.compute.manager [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Received event network-changed-9f691f3d-5247-46d4-b46b-6840c2cc557d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 640.319141] env[65503]: DEBUG nova.compute.manager [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Refreshing instance network info cache due to event network-changed-9f691f3d-5247-46d4-b46b-6840c2cc557d. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 640.319358] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Acquiring lock "refresh_cache-1c598208-a4d0-46b8-9a9c-107353e957b9" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.319628] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Acquired lock "refresh_cache-1c598208-a4d0-46b8-9a9c-107353e957b9" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.319808] env[65503]: DEBUG nova.network.neutron [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Refreshing network info cache for port 9f691f3d-5247-46d4-b46b-6840c2cc557d {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 640.322558] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526b97a3-b161-4ede-975b-b12144eabc6c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.351198] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d29a07e-d8e9-45c2-8aa1-7c98e472fecf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.361413] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance '39266117-e82e-48ae-932a-be04b1a7351a' progress to 83 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 640.564307] env[65503]: DEBUG nova.compute.manager [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 640.715985] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449491, 'name': CreateVM_Task, 'duration_secs': 0.500762} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.716475] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 640.718421] env[65503]: DEBUG nova.compute.utils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 640.720778] env[65503]: WARNING neutronclient.v2_0.client [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 640.721395] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.721666] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.722126] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 640.723108] env[65503]: DEBUG nova.compute.manager [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 640.723439] env[65503]: DEBUG nova.network.neutron [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 640.723915] env[65503]: WARNING neutronclient.v2_0.client [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 640.724438] env[65503]: WARNING neutronclient.v2_0.client [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 640.725629] env[65503]: WARNING openstack [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 640.726314] env[65503]: WARNING openstack [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 640.737682] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6105c679-766f-4dc8-b1ac-7864f3d06b44 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.745993] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 640.745993] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e844b8-e516-4def-c3c6-6f9da21e5e1c" [ 640.745993] env[65503]: _type = "Task" [ 640.745993] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.758348] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 640.759448] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e844b8-e516-4def-c3c6-6f9da21e5e1c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.759853] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82c1817e-c9a5-4d01-8680-084f2832dfff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.771018] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Waiting for the task: (returnval){ [ 640.771018] env[65503]: value = "task-4449492" [ 640.771018] env[65503]: _type = "Task" [ 640.771018] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.783865] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449492, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.830033] env[65503]: WARNING neutronclient.v2_0.client [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 640.830033] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 640.830033] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 640.858234] env[65503]: DEBUG nova.network.neutron [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Successfully updated port: 831f4b30-3858-4674-b6f1-f7f00241ca20 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 640.872207] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 640.874551] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb1c4546-1c8d-4905-971e-0ff848dfab2b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.879111] env[65503]: WARNING openstack [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'block-storage': Encountered an 
exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 640.879670] env[65503]: WARNING openstack [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 640.895331] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 640.895331] env[65503]: value = "task-4449493" [ 640.895331] env[65503]: _type = "Task" [ 640.895331] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.907036] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449493, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.918344] env[65503]: DEBUG nova.compute.manager [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Received event network-changed-74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 640.918344] env[65503]: DEBUG nova.compute.manager [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Refreshing instance network info cache due to event network-changed-74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 640.918344] env[65503]: DEBUG oslo_concurrency.lockutils [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] Acquiring lock "refresh_cache-db942a2d-671b-4036-a80b-d2375145cd29" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.918344] env[65503]: DEBUG oslo_concurrency.lockutils [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] Acquired lock "refresh_cache-db942a2d-671b-4036-a80b-d2375145cd29" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.918985] env[65503]: DEBUG nova.network.neutron [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Refreshing network info cache for port 74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 641.091729] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.224217] env[65503]: DEBUG nova.compute.manager [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 641.243422] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60b00ae4-3f08-4985-98c2-f4daf71727c8 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.026s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.247374] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.998s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.251099] env[65503]: INFO nova.compute.claims [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 641.273579] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e844b8-e516-4def-c3c6-6f9da21e5e1c, 'name': SearchDatastore_Task, 'duration_secs': 0.01686} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.279158] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.279811] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 641.280877] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.280877] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.280877] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 641.284523] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5359fc7-2a75-42a8-a2bc-1358f860cd24 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.294815] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449492, 'name': PowerOffVM_Task, 'duration_secs': 0.240078} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.295092] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 641.296833] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 641.299854] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6743312b-d342-4e61-9e78-c0ad55f9caa7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.303777] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 641.304651] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 641.304906] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abede720-be3d-4b3f-ad22-55043a0d171f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.315956] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 641.315956] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52975d01-4db6-341a-d8b0-dcf55b0aeb6c" [ 641.315956] env[65503]: _type = "Task" [ 641.315956] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.316268] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 641.319236] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48856f2a-95ca-49e7-8cf1-0f8102725f1e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.333728] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52975d01-4db6-341a-d8b0-dcf55b0aeb6c, 'name': SearchDatastore_Task, 'duration_secs': 0.015712} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.334831] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3175faea-b92b-42bc-9ed6-ac03a83ea12f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.352326] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 641.352326] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528077be-975c-db11-7c2b-7053d0931b11" [ 641.352326] env[65503]: _type = "Task" [ 641.352326] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.360715] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "refresh_cache-174c806e-c2e8-4064-8800-d4a35c19f5e6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.360715] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "refresh_cache-174c806e-c2e8-4064-8800-d4a35c19f5e6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.360715] env[65503]: DEBUG nova.network.neutron [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 641.367387] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528077be-975c-db11-7c2b-7053d0931b11, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.379017] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 641.379017] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 641.379017] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Deleting the datastore file [datastore1] 50f11559-b8c7-41a2-aa43-255a28ffa58c {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 641.379017] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7bb9832b-5d12-4bd4-ae6f-f7427271afb4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.387929] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Waiting for the task: (returnval){ [ 641.387929] env[65503]: value = "task-4449495" [ 641.387929] env[65503]: _type = "Task" [ 641.387929] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.403598] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449495, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.407525] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449493, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.423018] env[65503]: WARNING neutronclient.v2_0.client [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
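The recurring "Waiting for the task: (returnval){ value = "task-..." }" and "Task: {...} progress is N%" entries are oslo.vmware's task-polling loop: nova-compute starts an asynchronous vSphere *_Task method (PowerOffVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task, CopyVirtualDisk_Task, ...) and then blocks on the session until vCenter reports the task finished. A rough sketch of that call shape, assuming session is an already-established oslo_vmware.api.VMwareAPISession and vm_ref an opaque managed-object reference (both placeholders, not values taken from this log):

def power_off(session, vm_ref):
    # Kick off the asynchronous vSphere task on the VM...
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    # ...then poll it to completion; wait_for_task() produces the
    # "progress is N%" / "completed successfully" entries seen above
    # and raises if vCenter marks the task as failed.
    return session.wait_for_task(task)

Everything the driver does against vCenter in this log (power on/off, unregister, datastore file deletes, virtual-disk copies) follows this same invoke-then-wait shape.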
[ 641.423805] env[65503]: WARNING openstack [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 641.424182] env[65503]: WARNING openstack [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 641.436889] env[65503]: DEBUG nova.compute.manager [req-25b44430-6efd-4116-8dc1-60ffee65282f req-ec8ca732-cdac-44d0-976a-e227a3f73335 service nova] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Received event network-vif-deleted-b5328355-0841-4441-9689-a82ea7088346 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 641.441183] env[65503]: DEBUG nova.policy [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '207cec6c61fd4a21990b4ee168ce2cee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c910e3ce5b564a7791fd980b3795932b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 641.509387] env[65503]: WARNING neutronclient.v2_0.client [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
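The "Policy check for network:attach_external_network failed" entry above is Nova's oslo.policy authorization step: the request's credentials carry only the member and reader roles (is_admin is False), and attaching to an external network defaults to an admin-only rule, so the check fails and this tenant's port cannot land on an external network. As a deliberately simplified stand-in (not Nova's actual policy engine or rule definitions), the decision reduces to a role test against the credentials dict printed in the log:

# Hypothetical, simplified stand-in for an admin-only policy rule;
# Nova evaluates the real rule through oslo.policy.
ADMIN_ONLY_ACTIONS = {"network:attach_external_network"}


def is_authorized(action, creds):
    if action in ADMIN_ONLY_ACTIONS:
        return bool(creds.get("is_admin"))
    return True


creds = {"is_admin": False, "roles": ["reader", "member"]}
print(is_authorized("network:attach_external_network", creds))  # -> False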
[ 641.509387] env[65503]: WARNING openstack [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 641.509610] env[65503]: WARNING openstack [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 641.671717] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 641.672151] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 641.865251] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528077be-975c-db11-7c2b-7053d0931b11, 'name': SearchDatastore_Task, 'duration_secs': 0.025117} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.865520] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.865780] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 9297d849-a966-48da-ba6a-453c42b99e44/9297d849-a966-48da-ba6a-453c42b99e44.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 641.866058] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc09005e-4e16-4e0e-9dad-70e06f894d02 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.868749] env[65503]: WARNING openstack [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 641.869115] env[65503]: WARNING openstack [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 641.882856] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 641.882856] env[65503]: value = "task-4449496" [ 641.882856] env[65503]: _type = "Task" [ 641.882856] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.896482] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449496, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.903602] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449495, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175196} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.904331] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 641.904565] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 641.904765] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 641.910777] env[65503]: DEBUG oslo_vmware.api [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449493, 'name': PowerOnVM_Task, 'duration_secs': 0.59081} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.911267] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 641.911462] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-33d6f81f-fbca-4c58-8451-c8cdcef42c1f tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance '39266117-e82e-48ae-932a-be04b1a7351a' progress to 100 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 641.968263] env[65503]: DEBUG nova.network.neutron [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Successfully created port: 0d0ad262-a7c3-46b2-962a-ae6db1e6279d {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 642.056577] env[65503]: DEBUG nova.network.neutron [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Updating instance_info_cache with network_info: [{"id": "88cbb4cd-7967-47a9-9cda-a3456020aefa", "address": "fa:16:3e:81:94:8d", "network": {"id": "668794e6-8271-493d-9479-e10ebcbc299b", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2113111945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "a840b7f7e86745d79af6e9e1196e20fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "05b1253d-2b87-4158-9ff1-dafcf829f11f", "external-id": "nsx-vlan-transportzone-55", "segmentation_id": 55, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88cbb4cd-79", "ovs_interfaceid": "88cbb4cd-7967-47a9-9cda-a3456020aefa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 642.098803] env[65503]: WARNING openstack [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 642.099352] env[65503]: WARNING openstack [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 642.156437] env[65503]: DEBUG nova.network.neutron [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 642.240353] env[65503]: DEBUG nova.compute.manager [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 642.284638] env[65503]: DEBUG nova.virt.hardware [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 642.284980] env[65503]: DEBUG nova.virt.hardware [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 642.287882] env[65503]: DEBUG nova.virt.hardware [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 642.287882] env[65503]: DEBUG nova.virt.hardware [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 642.287882] env[65503]: DEBUG nova.virt.hardware [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 642.287882] env[65503]: DEBUG nova.virt.hardware [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 642.287882] env[65503]: DEBUG nova.virt.hardware [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 642.288277] env[65503]: DEBUG nova.virt.hardware [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 642.288277] env[65503]: DEBUG nova.virt.hardware [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 642.288277] env[65503]: DEBUG nova.virt.hardware [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 642.288277] env[65503]: DEBUG nova.virt.hardware [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 642.288277] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dce9c2-81b1-4f0a-aaf9-15de73329059 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.306477] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107a61e2-991c-4f89-b8d0-76250d96ca34 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.378503] env[65503]: WARNING neutronclient.v2_0.client [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 642.379331] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 642.379803] env[65503]: WARNING openstack [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 642.407929] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449496, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.559226] env[65503]: WARNING openstack [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 642.559226] env[65503]: WARNING openstack [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 642.566140] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Releasing lock "refresh_cache-85d0ed1d-6306-4999-832b-f4e69233fec7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.566140] env[65503]: DEBUG nova.compute.manager [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Instance network_info: |[{"id": "88cbb4cd-7967-47a9-9cda-a3456020aefa", "address": "fa:16:3e:81:94:8d", "network": {"id": "668794e6-8271-493d-9479-e10ebcbc299b", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2113111945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a840b7f7e86745d79af6e9e1196e20fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "05b1253d-2b87-4158-9ff1-dafcf829f11f", "external-id": "nsx-vlan-transportzone-55", "segmentation_id": 55, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88cbb4cd-79", "ovs_interfaceid": "88cbb4cd-7967-47a9-9cda-a3456020aefa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 642.569159] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:94:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '05b1253d-2b87-4158-9ff1-dafcf829f11f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '88cbb4cd-7967-47a9-9cda-a3456020aefa', 'vif_model': 'vmxnet3'}] {{(pid=65503) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 642.576957] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Creating folder: Project (a840b7f7e86745d79af6e9e1196e20fa). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 642.577536] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23454079-b534-49b0-a25b-732cddbd35f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.590624] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Created folder: Project (a840b7f7e86745d79af6e9e1196e20fa) in parent group-v870190. [ 642.590771] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Creating folder: Instances. Parent ref: group-v870239. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 642.595310] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b95c1f14-eb87-4d19-b987-9ee6b23f4168 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.608273] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Created folder: Instances in parent group-v870239. [ 642.608348] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 642.608566] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 642.608781] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ffc27fa-60fc-4ff0-b5c0-86cef0f1f96c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.635121] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 642.635121] env[65503]: value = "task-4449499" [ 642.635121] env[65503]: _type = "Task" [ 642.635121] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.649114] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449499, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.708997] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf061a8-906f-4d28-a2b7-9b0bc15ebdc0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.722797] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5427c0a6-a3a9-43b1-84c1-1d5270ad2791 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.759166] env[65503]: WARNING neutronclient.v2_0.client [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 642.759831] env[65503]: WARNING openstack [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 642.760221] env[65503]: WARNING openstack [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 642.768453] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8505bd00-490e-4cc3-a429-cfb6963ead16 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.780601] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576381e9-ee7b-409e-a2cd-bfe0a5cc6f20 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.805039] env[65503]: DEBUG nova.compute.provider_tree [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.901569] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449496, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637361} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.902018] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 9297d849-a966-48da-ba6a-453c42b99e44/9297d849-a966-48da-ba6a-453c42b99e44.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 642.902612] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 642.902612] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02009ed2-d1aa-48a7-b938-d676ab302f2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.911594] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 642.911594] env[65503]: value = "task-4449500" [ 642.911594] env[65503]: _type = "Task" [ 642.911594] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.931208] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449500, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.962853] env[65503]: DEBUG nova.virt.hardware [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 642.963041] env[65503]: DEBUG nova.virt.hardware [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 642.963222] env[65503]: DEBUG nova.virt.hardware [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 642.963488] env[65503]: DEBUG nova.virt.hardware [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 642.963659] env[65503]: DEBUG nova.virt.hardware [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 642.963839] env[65503]: DEBUG nova.virt.hardware [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 642.964143] env[65503]: DEBUG nova.virt.hardware [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 642.964398] env[65503]: DEBUG nova.virt.hardware [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 642.964744] env[65503]: DEBUG nova.virt.hardware [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c 
tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 642.964915] env[65503]: DEBUG nova.virt.hardware [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 642.965155] env[65503]: DEBUG nova.virt.hardware [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 642.966523] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5735b90f-05aa-4e91-856e-5c7365af8352 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.980251] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a51e73-b112-4354-9a93-8762b4efac6c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.001198] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 643.008711] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 643.009154] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 643.009436] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2227f956-075a-498a-926a-a266e5c69e8d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.031457] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 643.031457] env[65503]: value = "task-4449501" [ 643.031457] env[65503]: _type = "Task" [ 643.031457] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.042894] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449501, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.149193] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449499, 'name': CreateVM_Task, 'duration_secs': 0.434364} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.149587] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 643.150408] env[65503]: WARNING neutronclient.v2_0.client [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 643.150408] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.150560] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.150881] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 643.151232] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34f0373e-2868-4b32-bf47-a69f5391edb0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.157045] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for the task: (returnval){ [ 643.157045] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ae4de3-879e-a500-cbfe-14266b294ae5" [ 643.157045] env[65503]: _type = "Task" [ 643.157045] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.170393] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ae4de3-879e-a500-cbfe-14266b294ae5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.309152] env[65503]: DEBUG nova.scheduler.client.report [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 643.400752] env[65503]: DEBUG nova.network.neutron [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Updated VIF entry in instance network info cache for port 9f691f3d-5247-46d4-b46b-6840c2cc557d. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 643.400868] env[65503]: DEBUG nova.network.neutron [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Updating instance_info_cache with network_info: [{"id": "9f691f3d-5247-46d4-b46b-6840c2cc557d", "address": "fa:16:3e:0c:86:0e", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f691f3d-52", "ovs_interfaceid": "9f691f3d-5247-46d4-b46b-6840c2cc557d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 643.428051] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449500, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10497} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.428051] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 643.429382] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e1d016-b8bf-48ef-9605-64d265bf62db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.471516] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 9297d849-a966-48da-ba6a-453c42b99e44/9297d849-a966-48da-ba6a-453c42b99e44.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 643.482255] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19b1729c-868b-4a7f-b7b1-eabf2aa06565 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.497598] env[65503]: WARNING neutronclient.v2_0.client [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 643.498270] env[65503]: WARNING openstack [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 643.498711] env[65503]: WARNING openstack [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 643.515098] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 643.515098] env[65503]: value = "task-4449502" [ 643.515098] env[65503]: _type = "Task" [ 643.515098] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.527509] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449502, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.543434] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449501, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.671761] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ae4de3-879e-a500-cbfe-14266b294ae5, 'name': SearchDatastore_Task, 'duration_secs': 0.023759} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.671940] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.672265] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 643.672594] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.672789] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.672996] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 643.673943] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe2f6227-4aa5-4f99-b646-23553028c513 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.687417] env[65503]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 643.687628] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 643.688796] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-250e2cc4-b735-4fb9-8988-1cfa25d6c757 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.696463] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for the task: (returnval){ [ 643.696463] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52244a67-efc4-0a18-23f0-3f945c053228" [ 643.696463] env[65503]: _type = "Task" [ 643.696463] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.709276] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52244a67-efc4-0a18-23f0-3f945c053228, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.816718] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.572s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.817152] env[65503]: DEBUG nova.compute.manager [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 643.820640] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.497s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.822182] env[65503]: INFO nova.compute.claims [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.857631] env[65503]: DEBUG nova.network.neutron [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Updated VIF entry in instance network info cache for port 74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 643.858028] env[65503]: DEBUG nova.network.neutron [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Updating instance_info_cache with network_info: [{"id": "74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059", "address": "fa:16:3e:26:d2:17", "network": {"id": "d504c3f2-514b-4ccc-a7e0-1813e7cfece6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1527786722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72a3774600024de1b1347117fd020278", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74b1a7c2-cf", "ovs_interfaceid": "74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 643.868205] env[65503]: DEBUG nova.network.neutron [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Successfully updated port: 0d0ad262-a7c3-46b2-962a-ae6db1e6279d {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 643.904264] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d85e487-60c3-4f5f-8f86-d16a33c48ceb req-16ee2726-7d74-4e02-a523-1fd7cf33be18 service nova] Releasing lock "refresh_cache-1c598208-a4d0-46b8-9a9c-107353e957b9" {{(pid=65503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.030802] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449502, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.046737] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449501, 'name': CreateVM_Task, 'duration_secs': 0.516642} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.046970] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 644.047681] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.047681] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.048069] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 644.048326] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7bb93e0-7265-4513-98d0-76baeb594193 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.056871] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Waiting for the task: (returnval){ [ 644.056871] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52123331-7646-0590-c7b1-0459d49242ab" [ 644.056871] env[65503]: _type = "Task" [ 644.056871] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.067072] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52123331-7646-0590-c7b1-0459d49242ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.106335] env[65503]: DEBUG nova.network.neutron [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Updating instance_info_cache with network_info: [{"id": "831f4b30-3858-4674-b6f1-f7f00241ca20", "address": "fa:16:3e:8f:df:6e", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap831f4b30-38", "ovs_interfaceid": "831f4b30-3858-4674-b6f1-f7f00241ca20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 644.215272] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52244a67-efc4-0a18-23f0-3f945c053228, 'name': SearchDatastore_Task, 'duration_secs': 0.011917} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.216595] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec27e8f2-aea0-4720-af98-6585c5d54d98 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.228318] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for the task: (returnval){ [ 644.228318] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52467147-f950-3077-997a-127c56e6d058" [ 644.228318] env[65503]: _type = "Task" [ 644.228318] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.240686] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52467147-f950-3077-997a-127c56e6d058, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.327645] env[65503]: DEBUG nova.compute.utils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 644.331424] env[65503]: DEBUG nova.compute.manager [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 644.331950] env[65503]: DEBUG nova.network.neutron [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 644.332080] env[65503]: WARNING neutronclient.v2_0.client [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 644.332402] env[65503]: WARNING neutronclient.v2_0.client [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
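The neutronclient deprecation warnings that recur throughout this run point at openstacksdk as the replacement binding. As a hedged illustration only (this is not part of Nova's code path; the cloud name, network UUID and port name below are placeholders, not values from this log), the equivalent port operations through the SDK look roughly like this:

    # Hedged illustration: cloud name, network UUID and port name are placeholders.
    import openstack

    # openstack.connect() reads clouds.yaml / OS_* environment variables.
    conn = openstack.connect(cloud='devstack')

    NETWORK_ID = '00000000-0000-0000-0000-000000000000'  # placeholder network UUID

    # Rough equivalents of neutronclient's list_ports / create_port / delete_port.
    for port in conn.network.ports(network_id=NETWORK_ID):
        print(port.id, port.mac_address, port.status)

    port = conn.network.create_port(network_id=NETWORK_ID, name='example-port')
    conn.network.delete_port(port, ignore_missing=True)
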
[ 644.332942] env[65503]: WARNING openstack [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 644.334613] env[65503]: WARNING openstack [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 644.361593] env[65503]: DEBUG oslo_concurrency.lockutils [req-b1fd4dbe-9dd6-4d8b-a1eb-024b4c0c8118 req-1a37c66b-6d18-41a9-95f7-d0488381240a service nova] Releasing lock "refresh_cache-db942a2d-671b-4036-a80b-d2375145cd29" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.371567] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquiring lock "refresh_cache-d8d917c1-224c-4773-a911-d09f3f719e1b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.372188] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquired lock "refresh_cache-d8d917c1-224c-4773-a911-d09f3f719e1b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.372188] env[65503]: DEBUG nova.network.neutron [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 644.387817] env[65503]: DEBUG nova.policy [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '235d0527ebce4426896eaee338e968c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '85db5c8415744defa437bd02a64803e4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 644.532813] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449502, 'name': ReconfigVM_Task, 'duration_secs': 0.535402} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.533210] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 9297d849-a966-48da-ba6a-453c42b99e44/9297d849-a966-48da-ba6a-453c42b99e44.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 644.534528] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64a85c20-d5e9-4ada-bf5e-84f236f0b1bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.543802] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 644.543802] env[65503]: value = "task-4449503" [ 644.543802] env[65503]: _type = "Task" [ 644.543802] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.559257] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449503, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.571681] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52123331-7646-0590-c7b1-0459d49242ab, 'name': SearchDatastore_Task, 'duration_secs': 0.018611} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.572243] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.572560] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 644.572883] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.611460] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "refresh_cache-174c806e-c2e8-4064-8800-d4a35c19f5e6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.612233] env[65503]: DEBUG nova.compute.manager [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Instance network_info: |[{"id": "831f4b30-3858-4674-b6f1-f7f00241ca20", "address": "fa:16:3e:8f:df:6e", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap831f4b30-38", "ovs_interfaceid": "831f4b30-3858-4674-b6f1-f7f00241ca20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 644.614384] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:8f:df:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '831f4b30-3858-4674-b6f1-f7f00241ca20', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 644.622686] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Creating folder: Project (463e93d05e1e4b27a3dc866a5b1991d0). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 644.623767] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e88034b-8997-49ee-9687-0f380e6404d6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.636779] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Created folder: Project (463e93d05e1e4b27a3dc866a5b1991d0) in parent group-v870190. [ 644.637034] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Creating folder: Instances. Parent ref: group-v870243. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 644.637323] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9682be2b-de17-4f4c-add2-ac692e84d0ee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.650170] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Created folder: Instances in parent group-v870243. [ 644.650506] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 644.651091] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 644.651356] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-caf3f806-e007-4367-83cc-6fc2b719317e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.675648] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 644.675648] env[65503]: value = "task-4449506" [ 644.675648] env[65503]: _type = "Task" [ 644.675648] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.692776] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449506, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.742435] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52467147-f950-3077-997a-127c56e6d058, 'name': SearchDatastore_Task, 'duration_secs': 0.018963} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.742435] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.742945] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 85d0ed1d-6306-4999-832b-f4e69233fec7/85d0ed1d-6306-4999-832b-f4e69233fec7.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 644.742945] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.745995] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 644.745995] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-053e0863-5d95-404d-b6e4-5f4b31c25671 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.745995] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa465016-06b2-437d-8067-a8877b87f246 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.754771] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for the task: (returnval){ [ 644.754771] env[65503]: value = "task-4449507" [ 644.754771] env[65503]: _type = "Task" [ 644.754771] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.760646] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 644.761165] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 644.762443] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-516a36b1-0a1a-4ee0-86ac-b3f4e09a70b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.769716] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449507, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.774353] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Waiting for the task: (returnval){ [ 644.774353] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5264d03b-bcd7-071b-73e2-1589e5624357" [ 644.774353] env[65503]: _type = "Task" [ 644.774353] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.787180] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5264d03b-bcd7-071b-73e2-1589e5624357, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.811612] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquiring lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.811883] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.833079] env[65503]: DEBUG nova.compute.manager [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 644.836853] env[65503]: DEBUG nova.network.neutron [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Successfully created port: 24cf9d48-62c6-4756-bdcc-5008383a037b {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 644.875252] env[65503]: WARNING openstack [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 644.875648] env[65503]: WARNING openstack [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 645.067316] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449503, 'name': Rename_Task, 'duration_secs': 0.224739} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.071684] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 645.072514] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c58c6b7-8895-4a87-9ee5-469b98ab239a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.083945] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 645.083945] env[65503]: value = "task-4449508" [ 645.083945] env[65503]: _type = "Task" [ 645.083945] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.102164] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449508, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.192299] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449506, 'name': CreateVM_Task, 'duration_secs': 0.47035} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.192538] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 645.193174] env[65503]: WARNING neutronclient.v2_0.client [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
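Most of the oslo_vmware.api entries above follow one pattern: invoke a vSphere *_Task method through the session, then poll the returned task until the "progress is N%" lines turn into "completed successfully". A minimal sketch of that call pattern, assuming placeholder vCenter credentials and that the session can see at least one VM (this is a generic oslo.vmware example, not Nova's driver code):

    # Minimal sketch of the invoke-then-poll pattern seen in this log.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',        # placeholder vCenter host
        'user', 'password',           # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5,
    )

    # Fetch managed object references for VirtualMachine objects and pick the first.
    retrieve_result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                         'VirtualMachine', 100)
    vm_ref = retrieve_result.objects[0].obj

    # Start an asynchronous vSphere task and block until it finishes;
    # wait_for_task() is what produces the "progress is N%" polling lines above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
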
[ 645.193450] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.193596] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.193932] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 645.194248] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc568a9d-5d15-4450-a76f-398c976a7b6f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.201536] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 645.201536] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52509236-278f-cc17-63c0-9bb89c516cf8" [ 645.201536] env[65503]: _type = "Task" [ 645.201536] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.217464] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52509236-278f-cc17-63c0-9bb89c516cf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.269951] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449507, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.290418] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5264d03b-bcd7-071b-73e2-1589e5624357, 'name': SearchDatastore_Task, 'duration_secs': 0.017426} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.296109] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0aeedce-3d3a-4b08-81ce-8a5f51979ffa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.306180] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Waiting for the task: (returnval){ [ 645.306180] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5203e079-fe8d-8d64-ea13-738efdd67d61" [ 645.306180] env[65503]: _type = "Task" [ 645.306180] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.317578] env[65503]: DEBUG nova.compute.manager [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 645.332427] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5203e079-fe8d-8d64-ea13-738efdd67d61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.357506] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "fd548bd7-b686-43ef-83a7-c40addf8ba75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.357892] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.443026] env[65503]: DEBUG nova.network.neutron [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 645.479508] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c0b857-648f-48bc-9b03-9ea2b6fa98a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.493284] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e7e962-e283-41ed-ab06-67b0469898d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.538933] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c47a39-131e-4b14-9dfd-2cfc5da191fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.550036] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75065d9a-e869-4a72-8cc5-16e02a5c2ddb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.568901] env[65503]: DEBUG nova.compute.provider_tree [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.596800] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449508, 'name': PowerOnVM_Task} progress is 1%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.716781] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52509236-278f-cc17-63c0-9bb89c516cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.06676} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.717184] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.717482] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 645.717753] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.768741] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449507, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642705} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.768741] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 85d0ed1d-6306-4999-832b-f4e69233fec7/85d0ed1d-6306-4999-832b-f4e69233fec7.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 645.768741] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 645.768741] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df16f350-f219-42b9-bc14-a5b8d5f05e90 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.776952] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for the task: (returnval){ [ 645.776952] env[65503]: value = "task-4449513" [ 645.776952] env[65503]: _type = "Task" [ 645.776952] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.789550] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449513, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.817661] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5203e079-fe8d-8d64-ea13-738efdd67d61, 'name': SearchDatastore_Task, 'duration_secs': 0.061481} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.817949] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.818219] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 645.818889] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.818889] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 645.819026] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee389bab-4710-4b54-b906-f4f6cb8e1630 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.821432] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62a40229-93ca-4459-8f2e-bdef99f2c956 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.833180] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Waiting for the task: (returnval){ [ 645.833180] env[65503]: value = "task-4449514" [ 645.833180] env[65503]: _type = "Task" [ 645.833180] env[65503]: } to 
complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.835535] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 645.835535] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 645.840055] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56be3f3e-5098-4066-8cc9-2f17e613e81b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.851059] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449514, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.851406] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 645.851406] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529a1ab9-8d90-7d76-2590-dadb71dca5f6" [ 645.851406] env[65503]: _type = "Task" [ 645.851406] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.852469] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.853657] env[65503]: DEBUG nova.compute.manager [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 645.866015] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529a1ab9-8d90-7d76-2590-dadb71dca5f6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.870661] env[65503]: WARNING openstack [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 645.871058] env[65503]: WARNING openstack [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 645.887893] env[65503]: DEBUG nova.virt.hardware [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 645.888195] env[65503]: DEBUG nova.virt.hardware [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 645.888357] env[65503]: DEBUG nova.virt.hardware [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 645.888507] env[65503]: DEBUG nova.virt.hardware [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 645.888653] env[65503]: DEBUG nova.virt.hardware [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 645.888794] env[65503]: DEBUG nova.virt.hardware [None req-583b3457-288d-4ca1-9724-7beca1861ae4 
tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 645.888999] env[65503]: DEBUG nova.virt.hardware [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 645.889190] env[65503]: DEBUG nova.virt.hardware [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 645.889352] env[65503]: DEBUG nova.virt.hardware [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 645.889516] env[65503]: DEBUG nova.virt.hardware [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 645.889710] env[65503]: DEBUG nova.virt.hardware [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 645.890704] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4d3f4b-1a9b-4c06-a500-54e99b32d736 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.902684] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197a6f7e-ca8a-4d5f-9e1d-6969a2054c7e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.070937] env[65503]: DEBUG nova.scheduler.client.report [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 646.099912] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b 
tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449508, 'name': PowerOnVM_Task} progress is 64%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.289924] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449513, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078797} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.290280] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 646.291252] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9645c77-4234-45f9-beba-fddd504e8a90 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.320823] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 85d0ed1d-6306-4999-832b-f4e69233fec7/85d0ed1d-6306-4999-832b-f4e69233fec7.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 646.321626] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c273024a-af72-4e2c-aef3-2f5b14e83563 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.349489] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449514, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.351637] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for the task: (returnval){ [ 646.351637] env[65503]: value = "task-4449515" [ 646.351637] env[65503]: _type = "Task" [ 646.351637] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.365839] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449515, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.370117] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529a1ab9-8d90-7d76-2590-dadb71dca5f6, 'name': SearchDatastore_Task, 'duration_secs': 0.022187} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.371193] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e543b15-5a79-417a-be79-9136fb2cc49b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.380791] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 646.380791] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52820211-c666-0427-3835-7f5c684c1864" [ 646.380791] env[65503]: _type = "Task" [ 646.380791] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.392841] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52820211-c666-0427-3835-7f5c684c1864, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.574924] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e74cf6-d4f8-5a93-641e-f548bed32c94/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 646.575722] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.755s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.576283] env[65503]: DEBUG nova.compute.manager [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 646.581520] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b34886-a5bb-41f9-a6c0-ce22ac6582bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.583051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.668s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.584494] env[65503]: INFO nova.compute.claims [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 646.593071] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e74cf6-d4f8-5a93-641e-f548bed32c94/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 646.593266] env[65503]: ERROR oslo_vmware.rw_handles [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e74cf6-d4f8-5a93-641e-f548bed32c94/disk-0.vmdk due to incomplete transfer. [ 646.593959] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3194d4a3-c40a-41e4-9e7e-9140ae68cea2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.601615] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449508, 'name': PowerOnVM_Task} progress is 68%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.610191] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e74cf6-d4f8-5a93-641e-f548bed32c94/disk-0.vmdk. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 646.610412] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Uploaded image f4a072a1-0a6b-43fe-9278-cc47fc5f2852 to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 646.614844] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 646.615649] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1254d740-ffa6-4a22-b37e-87f2dc98aeb0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.625619] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 646.625619] env[65503]: value = "task-4449516" [ 646.625619] env[65503]: _type = "Task" [ 646.625619] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.638102] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449516, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.850655] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449514, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626727} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.851100] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 646.852512] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 646.852964] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64813ac1-e4da-49b3-8fab-d57c6975cffe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.866991] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449515, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.868721] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Waiting for the task: (returnval){ [ 646.868721] env[65503]: value = "task-4449517" [ 646.868721] env[65503]: _type = "Task" [ 646.868721] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.878805] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449517, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.895750] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52820211-c666-0427-3835-7f5c684c1864, 'name': SearchDatastore_Task, 'duration_secs': 0.05001} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.896048] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.896412] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 174c806e-c2e8-4064-8800-d4a35c19f5e6/174c806e-c2e8-4064-8800-d4a35c19f5e6.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 646.896716] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c661aef4-089c-405f-ab50-33aa0e4efd6a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.905440] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 646.905440] env[65503]: value = "task-4449518" [ 646.905440] env[65503]: _type = "Task" [ 646.905440] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.916443] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.976066] env[65503]: DEBUG nova.network.neutron [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Successfully updated port: 24cf9d48-62c6-4756-bdcc-5008383a037b {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 647.085262] env[65503]: DEBUG nova.compute.utils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 647.089922] env[65503]: DEBUG nova.compute.manager [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 647.089922] env[65503]: DEBUG nova.network.neutron [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 647.089922] env[65503]: WARNING neutronclient.v2_0.client [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 647.089922] env[65503]: WARNING neutronclient.v2_0.client [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 647.089922] env[65503]: WARNING openstack [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 647.090296] env[65503]: WARNING openstack [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 647.112676] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449508, 'name': PowerOnVM_Task} progress is 82%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.138265] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449516, 'name': Destroy_Task, 'duration_secs': 0.305195} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.139846] env[65503]: WARNING neutronclient.v2_0.client [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 647.140697] env[65503]: WARNING openstack [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 647.141149] env[65503]: WARNING openstack [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 647.149032] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Destroyed the VM [ 647.150029] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 647.150360] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a3406d4e-1932-47b5-81b1-7ff5ff4cf63c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.161610] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 647.161610] env[65503]: value = "task-4449519" [ 647.161610] env[65503]: _type = "Task" [ 647.161610] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.172672] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449519, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.367756] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449515, 'name': ReconfigVM_Task, 'duration_secs': 0.988544} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.367756] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 85d0ed1d-6306-4999-832b-f4e69233fec7/85d0ed1d-6306-4999-832b-f4e69233fec7.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 647.367756] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a3fd347-f0d3-4256-b423-7020743983ac {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.382527] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449517, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.345859} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.384663] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 647.385326] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for the task: (returnval){ [ 647.385326] env[65503]: value = "task-4449520" [ 647.385326] env[65503]: _type = "Task" [ 647.385326] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.386301] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de73b099-0562-4730-b073-23559ef6935e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.403424] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449520, 'name': Rename_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.429934] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 647.434848] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eede2038-bf8c-4bca-85b9-a3e1495a83ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.466174] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Waiting for the task: (returnval){ [ 647.466174] env[65503]: value = "task-4449521" [ 647.466174] env[65503]: _type = "Task" [ 647.466174] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.466174] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449518, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.478158] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449521, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.478885] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquiring lock "refresh_cache-16d508f9-72f0-4853-92fb-c8c7a37b5668" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.479146] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquired lock "refresh_cache-16d508f9-72f0-4853-92fb-c8c7a37b5668" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.479319] env[65503]: DEBUG nova.network.neutron [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 647.597037] env[65503]: DEBUG nova.compute.manager [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 647.616229] env[65503]: DEBUG nova.policy [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a470b09e462b4f2da9f55853d8cc0ff8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f1a7c2c68a498c8ffa173e9778d59d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 647.632845] env[65503]: DEBUG oslo_vmware.api [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449508, 'name': PowerOnVM_Task, 'duration_secs': 2.211026} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.633048] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 647.633320] env[65503]: INFO nova.compute.manager [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Took 13.07 seconds to spawn the instance on the hypervisor. [ 647.633484] env[65503]: DEBUG nova.compute.manager [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 647.634486] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647982a4-9ad6-49eb-80c8-fd41eb61902a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.695722] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449519, 'name': RemoveSnapshot_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.905388] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449520, 'name': Rename_Task, 'duration_secs': 0.494272} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.906073] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 647.906384] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57d894e2-4de0-4f11-bb37-51925b7a2675 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.916125] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for the task: (returnval){ [ 647.916125] env[65503]: value = "task-4449522" [ 647.916125] env[65503]: _type = "Task" [ 647.916125] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.936376] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.834562} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.936376] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449522, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.936376] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 174c806e-c2e8-4064-8800-d4a35c19f5e6/174c806e-c2e8-4064-8800-d4a35c19f5e6.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 647.936376] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 647.936376] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33de87db-8046-4405-9b33-646a4e0d8751 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.944167] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 647.944167] env[65503]: value = "task-4449523" [ 647.944167] env[65503]: _type = "Task" [ 647.944167] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.962753] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449523, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.983444] env[65503]: WARNING openstack [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 647.984230] env[65503]: WARNING openstack [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 648.008887] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449521, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.012524] env[65503]: DEBUG nova.network.neutron [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Updating instance_info_cache with network_info: [{"id": "0d0ad262-a7c3-46b2-962a-ae6db1e6279d", "address": "fa:16:3e:6d:c2:52", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d0ad262-a7", "ovs_interfaceid": "0d0ad262-a7c3-46b2-962a-ae6db1e6279d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 648.165990] env[65503]: INFO nova.compute.manager [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] 
[instance: 9297d849-a966-48da-ba6a-453c42b99e44] Took 32.93 seconds to build instance. [ 648.178010] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083bc14c-9b61-4887-bda8-aaa18642a62a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.191532] env[65503]: DEBUG oslo_vmware.api [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449519, 'name': RemoveSnapshot_Task, 'duration_secs': 0.660287} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.195369] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 648.195726] env[65503]: INFO nova.compute.manager [None req-ab894076-202e-4d04-a9bd-85c2d19b843d tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Took 14.02 seconds to snapshot the instance on the hypervisor. [ 648.201548] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46941d89-ea84-4ed8-ac89-2aef296847f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.250272] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6521ee2c-9cdc-448d-b9df-127d460d1952 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.260535] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3866a43-3a7c-43ad-95b6-f787210dfced {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.278113] env[65503]: DEBUG nova.compute.provider_tree [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.431519] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449522, 'name': PowerOnVM_Task} progress is 1%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.455787] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449523, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078031} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.455982] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 648.457019] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d708f3e-a21d-4b2b-95c8-36a65c3030a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.481577] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 174c806e-c2e8-4064-8800-d4a35c19f5e6/174c806e-c2e8-4064-8800-d4a35c19f5e6.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 648.485200] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca38f8df-a0c0-439b-98aa-9441aab6e0ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.506588] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449521, 'name': ReconfigVM_Task, 'duration_secs': 0.707067} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.508327] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 50f11559-b8c7-41a2-aa43-255a28ffa58c/50f11559-b8c7-41a2-aa43-255a28ffa58c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 648.509764] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 648.509764] env[65503]: value = "task-4449524" [ 648.509764] env[65503]: _type = "Task" [ 648.509764] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.509764] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c124b7b-3ae5-48f1-a61a-6608394e6324 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.516996] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Releasing lock "refresh_cache-d8d917c1-224c-4773-a911-d09f3f719e1b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.517231] env[65503]: DEBUG nova.compute.manager [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Instance network_info: |[{"id": "0d0ad262-a7c3-46b2-962a-ae6db1e6279d", "address": "fa:16:3e:6d:c2:52", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d0ad262-a7", "ovs_interfaceid": "0d0ad262-a7c3-46b2-962a-ae6db1e6279d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 648.517563] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:c2:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d0ad262-a7c3-46b2-962a-ae6db1e6279d', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 648.526399] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Creating folder: Project (c910e3ce5b564a7791fd980b3795932b). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 648.533534] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca9b0c9e-2092-4c7d-a0ed-7aceb3502c32 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.536080] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Waiting for the task: (returnval){ [ 648.536080] env[65503]: value = "task-4449525" [ 648.536080] env[65503]: _type = "Task" [ 648.536080] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.537163] env[65503]: DEBUG nova.network.neutron [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Successfully created port: b9ca30d7-8bbd-483b-bc06-14bf5c43111c {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 648.554091] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449525, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.556175] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Created folder: Project (c910e3ce5b564a7791fd980b3795932b) in parent group-v870190. [ 648.556366] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Creating folder: Instances. Parent ref: group-v870249. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 648.556784] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2badba8-0a7b-47f6-ad0a-0281d946862c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.571055] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Created folder: Instances in parent group-v870249. [ 648.571444] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 648.571639] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 648.571967] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-371a7846-cfb7-4b39-b093-a54e12ddd005 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.590883] env[65503]: DEBUG nova.network.neutron [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 648.602440] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 648.602440] env[65503]: value = "task-4449528" [ 648.602440] env[65503]: _type = "Task" [ 648.602440] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.609601] env[65503]: DEBUG nova.compute.manager [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 648.618105] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449528, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.671043] env[65503]: DEBUG nova.virt.hardware [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 648.671043] env[65503]: DEBUG nova.virt.hardware [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 648.671043] env[65503]: DEBUG nova.virt.hardware [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 648.671402] env[65503]: DEBUG nova.virt.hardware [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 648.671402] env[65503]: DEBUG nova.virt.hardware [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 648.672011] env[65503]: DEBUG nova.virt.hardware [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 648.675441] env[65503]: DEBUG nova.virt.hardware [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 648.676096] env[65503]: DEBUG nova.virt.hardware [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 648.676399] env[65503]: DEBUG nova.virt.hardware [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 648.676895] env[65503]: DEBUG nova.virt.hardware [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 648.678124] env[65503]: DEBUG nova.virt.hardware [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 648.680052] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b8d47ca-e994-4b8f-959b-186e2fc8953b tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "9297d849-a966-48da-ba6a-453c42b99e44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.286s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.682361] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9e3e81-8538-48fd-9ad1-94a7ded53f57 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.704571] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9cecec-4815-409d-8f4b-849e129cc214 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.783303] env[65503]: DEBUG nova.scheduler.client.report [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 648.937808] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449522, 'name': PowerOnVM_Task} progress is 64%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.024635] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449524, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.051930] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449525, 'name': Rename_Task, 'duration_secs': 0.261328} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.052463] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 649.052547] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f8f0edd-b963-4384-9974-9ec2362c52b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.061521] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Waiting for the task: (returnval){ [ 649.061521] env[65503]: value = "task-4449530" [ 649.061521] env[65503]: _type = "Task" [ 649.061521] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.071564] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449530, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.118985] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449528, 'name': CreateVM_Task, 'duration_secs': 0.445619} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.120610] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 649.120610] env[65503]: WARNING neutronclient.v2_0.client [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
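The inventory record logged just above for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 is what the resource tracker reports to Placement. As a rough, illustrative calculation (not Nova or Placement source code), the capacity Placement schedules against per resource class is (total - reserved) * allocation_ratio, while max_unit caps what a single instance may request; the numbers below are copied from that logged inventory.

# Illustrative sketch: schedulable capacity implied by the inventory record above.
# Formula per resource class: (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 96},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, per-instance cap={inv['max_unit']}")
# -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200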
[ 649.120610] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.120610] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.121644] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 649.121644] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1df67e52-65be-4249-8e96-b8370145faec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.127176] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for the task: (returnval){ [ 649.127176] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5260322e-540c-1b5f-6ea0-7d0bf7ebd994" [ 649.127176] env[65503]: _type = "Task" [ 649.127176] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.136905] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5260322e-540c-1b5f-6ea0-7d0bf7ebd994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.191845] env[65503]: DEBUG nova.compute.manager [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 649.289777] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.290655] env[65503]: DEBUG nova.compute.manager [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 649.294654] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.734s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.297033] env[65503]: INFO nova.compute.claims [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.436835] env[65503]: DEBUG oslo_vmware.api [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449522, 'name': PowerOnVM_Task, 'duration_secs': 1.39274} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.437435] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 649.437775] env[65503]: INFO nova.compute.manager [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Took 12.33 seconds to spawn the instance on the hypervisor. 
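The "Acquiring lock" / "acquired ... waited Ns" / '"released" ... held Ns' triplets above (the "compute_resources" lock held 2.707s by instance_claim, the image-cache lock taken around the datastore search) are emitted by oslo.concurrency's lockutils helpers. A minimal sketch of that pattern, assuming only the public oslo_concurrency.lockutils API; the lock names and the empty bodies are placeholders, not Nova's actual code.

# Illustrative sketch of the locking pattern behind the lockutils log lines above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the named lock held; lockutils emits the
    # "acquired ... waited Ns" / '"released" ... held Ns' debug records around it.
    pass

# Equivalent context-manager form, e.g. serializing work on a cached image:
with lockutils.lock('[datastore1] devstack-image-cache_base/<image-id>'):
    pass  # search/copy the cached VMDK while the lock is held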
[ 649.438121] env[65503]: DEBUG nova.compute.manager [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 649.439178] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcf0a36-f008-47cb-8fd3-49f7927c6397 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.523321] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449524, 'name': ReconfigVM_Task, 'duration_secs': 0.731146} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.523497] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 174c806e-c2e8-4064-8800-d4a35c19f5e6/174c806e-c2e8-4064-8800-d4a35c19f5e6.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 649.524485] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e6c3a8a-f2f8-4659-a004-7b1b8322792b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.534301] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 649.534301] env[65503]: value = "task-4449531" [ 649.534301] env[65503]: _type = "Task" [ 649.534301] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.544319] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449531, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.575832] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449530, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.647356] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5260322e-540c-1b5f-6ea0-7d0bf7ebd994, 'name': SearchDatastore_Task, 'duration_secs': 0.053914} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.647764] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.648016] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 649.648286] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.648774] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.648774] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 649.650038] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8800644-785f-4880-8805-966bfa60067e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.669741] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 649.669897] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 649.670763] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ab41d26-a60c-41b3-86b8-94a44eba975f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.682010] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for the task: (returnval){ [ 649.682010] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524cd015-0f76-ef7f-8739-4f9f515625de" [ 649.682010] env[65503]: _type = "Task" [ 649.682010] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.705420] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524cd015-0f76-ef7f-8739-4f9f515625de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.726280] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.803581] env[65503]: DEBUG nova.compute.utils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 649.807968] env[65503]: DEBUG nova.compute.manager [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 649.808778] env[65503]: DEBUG nova.network.neutron [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 649.808778] env[65503]: WARNING neutronclient.v2_0.client [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 649.808778] env[65503]: WARNING neutronclient.v2_0.client [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
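Nearly every vCenter interaction in this log follows the same oslo.vmware shape: an "Invoking <ManagedObject>.<Method>" request, a stream of "Task: {...} progress is N%" records from _poll_task, and finally "completed successfully" with a duration. A minimal sketch of that call pattern, assuming the public oslo_vmware.api.VMwareAPISession interface; the host, credentials and VM reference are placeholders, and this is not the Nova driver's actual code.

# Illustrative sketch of the invoke/poll/wait pattern behind the task records above.
# Host, credentials and vm_ref are placeholders; running this needs a real vCenter.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # a VirtualMachine managed-object reference looked up elsewhere

# Start an asynchronous vSphere task (here a power-on) ...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
# ... then block on it; wait_for_task polls the task and logs the progress lines.
session.wait_for_task(task)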
[ 649.811312] env[65503]: WARNING openstack [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 649.811312] env[65503]: WARNING openstack [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 649.948679] env[65503]: WARNING openstack [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 649.949718] env[65503]: WARNING openstack [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 649.968619] env[65503]: INFO nova.compute.manager [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Took 33.64 seconds to build instance. [ 650.049365] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449531, 'name': Rename_Task, 'duration_secs': 0.318382} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.054114] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 650.055464] env[65503]: DEBUG nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Received event network-vif-plugged-ea1023c2-b650-4073-a973-29291b753f53 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 650.056497] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Acquiring lock "9297d849-a966-48da-ba6a-453c42b99e44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.056497] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Lock "9297d849-a966-48da-ba6a-453c42b99e44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.056497] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Lock "9297d849-a966-48da-ba6a-453c42b99e44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.056497] env[65503]: DEBUG nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] No waiting events found dispatching network-vif-plugged-ea1023c2-b650-4073-a973-29291b753f53 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 650.057028] env[65503]: WARNING nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Received unexpected event network-vif-plugged-ea1023c2-b650-4073-a973-29291b753f53 for instance with vm_state active and task_state None. [ 650.057028] env[65503]: DEBUG nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Received event network-changed-ea1023c2-b650-4073-a973-29291b753f53 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 650.057229] env[65503]: DEBUG nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Refreshing instance network info cache due to event network-changed-ea1023c2-b650-4073-a973-29291b753f53. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 650.057506] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Acquiring lock "refresh_cache-9297d849-a966-48da-ba6a-453c42b99e44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.057598] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Acquired lock "refresh_cache-9297d849-a966-48da-ba6a-453c42b99e44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.057755] env[65503]: DEBUG nova.network.neutron [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Refreshing network info cache for port ea1023c2-b650-4073-a973-29291b753f53 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 650.059823] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad37bec2-e507-45d9-8c0b-583aafc2532e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.074024] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 650.074024] env[65503]: value = "task-4449532" [ 650.074024] env[65503]: _type = "Task" [ 650.074024] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.084418] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449530, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.091870] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449532, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.152700] env[65503]: DEBUG nova.policy [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ddb7dc8858e4aa09c61dc232cb465eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5de0ae091db74426975a523e945110fa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 650.197045] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524cd015-0f76-ef7f-8739-4f9f515625de, 'name': SearchDatastore_Task, 'duration_secs': 0.042009} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.198296] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5ca8800-7def-49e0-bacc-90904913442e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.205774] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for the task: (returnval){ [ 650.205774] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52586a62-3d9e-6fca-7162-4a29426a003c" [ 650.205774] env[65503]: _type = "Task" [ 650.205774] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.218169] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52586a62-3d9e-6fca-7162-4a29426a003c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.309674] env[65503]: DEBUG nova.compute.manager [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 650.359523] env[65503]: DEBUG nova.network.neutron [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Successfully updated port: b9ca30d7-8bbd-483b-bc06-14bf5c43111c {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 650.474318] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f504a629-1f84-4aee-b004-418f0c5cc616 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "85d0ed1d-6306-4999-832b-f4e69233fec7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.686s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.562621] env[65503]: WARNING neutronclient.v2_0.client [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 650.563337] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 650.563767] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 650.589939] env[65503]: DEBUG oslo_vmware.api [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Task: {'id': task-4449530, 'name': PowerOnVM_Task, 'duration_secs': 1.429549} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.597428] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 650.597428] env[65503]: DEBUG nova.compute.manager [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 650.597525] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449532, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.599303] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5540f9-f956-4454-b5c3-b37e0ffbba8b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.724293] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52586a62-3d9e-6fca-7162-4a29426a003c, 'name': SearchDatastore_Task, 'duration_secs': 0.014522} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.724293] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.724932] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] d8d917c1-224c-4773-a911-d09f3f719e1b/d8d917c1-224c-4773-a911-d09f3f719e1b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 650.724932] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91647357-b276-4c38-9731-8293f032e884 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.735954] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for the task: (returnval){ [ 650.735954] env[65503]: value = "task-4449533" [ 650.735954] env[65503]: _type = "Task" [ 650.735954] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.747827] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449533, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.797894] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07afd1ce-3b36-4771-a903-551de4802c39 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.807126] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7554498-40d9-4e7f-b71b-28781c661b1c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.847592] env[65503]: DEBUG nova.network.neutron [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Successfully created port: 0349cd73-74a3-45ee-9582-091d2fe091f9 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 650.850682] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1630415-08a0-4be1-985d-c26346a89622 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.863998] env[65503]: WARNING neutronclient.v2_0.client [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 650.863998] env[65503]: WARNING openstack [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 650.863998] env[65503]: WARNING openstack [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 650.872221] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae91f8d-71e5-4473-b721-9fbb54f6d7c2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.877410] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "refresh_cache-24e054d7-7662-47ef-8f69-4738c5ff9548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.877552] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquired lock 
"refresh_cache-24e054d7-7662-47ef-8f69-4738c5ff9548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.877712] env[65503]: DEBUG nova.network.neutron [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 650.891115] env[65503]: DEBUG nova.compute.provider_tree [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.935550] env[65503]: DEBUG nova.compute.manager [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Received event network-vif-plugged-831f4b30-3858-4674-b6f1-f7f00241ca20 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 650.935730] env[65503]: DEBUG oslo_concurrency.lockutils [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Acquiring lock "174c806e-c2e8-4064-8800-d4a35c19f5e6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.935968] env[65503]: DEBUG oslo_concurrency.lockutils [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Lock "174c806e-c2e8-4064-8800-d4a35c19f5e6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.936295] env[65503]: DEBUG oslo_concurrency.lockutils [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Lock "174c806e-c2e8-4064-8800-d4a35c19f5e6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.936484] env[65503]: DEBUG nova.compute.manager [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] No waiting events found dispatching network-vif-plugged-831f4b30-3858-4674-b6f1-f7f00241ca20 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 650.936661] env[65503]: WARNING nova.compute.manager [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Received unexpected event network-vif-plugged-831f4b30-3858-4674-b6f1-f7f00241ca20 for instance with vm_state building and task_state spawning. 
[ 650.936843] env[65503]: DEBUG nova.compute.manager [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Received event network-changed-831f4b30-3858-4674-b6f1-f7f00241ca20 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 650.936978] env[65503]: DEBUG nova.compute.manager [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Refreshing instance network info cache due to event network-changed-831f4b30-3858-4674-b6f1-f7f00241ca20. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 650.937218] env[65503]: DEBUG oslo_concurrency.lockutils [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Acquiring lock "refresh_cache-174c806e-c2e8-4064-8800-d4a35c19f5e6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.937414] env[65503]: DEBUG oslo_concurrency.lockutils [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Acquired lock "refresh_cache-174c806e-c2e8-4064-8800-d4a35c19f5e6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.937643] env[65503]: DEBUG nova.network.neutron [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Refreshing network info cache for port 831f4b30-3858-4674-b6f1-f7f00241ca20 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 651.087941] env[65503]: DEBUG oslo_vmware.api [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449532, 'name': PowerOnVM_Task, 'duration_secs': 0.942019} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.092127] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 651.092127] env[65503]: INFO nova.compute.manager [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Took 11.43 seconds to spawn the instance on the hypervisor. 
[ 651.092127] env[65503]: DEBUG nova.compute.manager [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 651.092127] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ef8326-6fd0-4e43-9d14-8eb157bacd9d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.122709] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.252576] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449533, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.349132] env[65503]: DEBUG nova.compute.manager [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 651.381678] env[65503]: WARNING openstack [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 651.382131] env[65503]: WARNING openstack [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 651.392707] env[65503]: DEBUG nova.virt.hardware [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 651.393327] env[65503]: DEBUG nova.virt.hardware [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 651.393327] env[65503]: DEBUG nova.virt.hardware [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 651.393468] env[65503]: DEBUG nova.virt.hardware [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 651.393954] env[65503]: DEBUG nova.virt.hardware [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 651.393954] env[65503]: DEBUG nova.virt.hardware [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 651.393954] env[65503]: DEBUG nova.virt.hardware [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 651.394160] env[65503]: DEBUG nova.virt.hardware [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 651.394279] env[65503]: DEBUG nova.virt.hardware [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 651.394438] env[65503]: DEBUG nova.virt.hardware [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 651.394603] env[65503]: DEBUG nova.virt.hardware [None 
req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 651.395566] env[65503]: DEBUG nova.scheduler.client.report [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 651.399727] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1358c77-c764-48b9-923e-b1b7cf4ad487 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.412048] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045d9359-795a-41f0-91b3-19c93e338ed9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.443049] env[65503]: WARNING neutronclient.v2_0.client [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 651.444656] env[65503]: WARNING openstack [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 651.444656] env[65503]: WARNING openstack [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 651.611334] env[65503]: INFO nova.compute.manager [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Took 30.30 seconds to build instance. [ 651.751230] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449533, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.858962} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.751526] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] d8d917c1-224c-4773-a911-d09f3f719e1b/d8d917c1-224c-4773-a911-d09f3f719e1b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 651.751768] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 651.752787] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce17dc77-22bb-4f8c-b133-c64cd5079d04 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.759994] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for the task: (returnval){ [ 651.759994] env[65503]: value = "task-4449535" [ 651.759994] env[65503]: _type = "Task" [ 651.759994] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.770893] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449535, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.831028] env[65503]: DEBUG nova.network.neutron [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Updating instance_info_cache with network_info: [{"id": "24cf9d48-62c6-4756-bdcc-5008383a037b", "address": "fa:16:3e:35:d4:3c", "network": {"id": "a17078ee-d882-4b40-b077-5517204ce959", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1994350859-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "85db5c8415744defa437bd02a64803e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24cf9d48-62", "ovs_interfaceid": "24cf9d48-62c6-4756-bdcc-5008383a037b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 651.846047] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 651.846047] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 651.905933] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.906328] env[65503]: DEBUG nova.compute.manager [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 651.911012] env[65503]: DEBUG nova.network.neutron [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 651.915236] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.713s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.915236] env[65503]: DEBUG nova.objects.instance [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Lazy-loading 'resources' on Instance uuid 2dbc3860-c65c-4cbb-8d90-f1f74420e652 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 652.114669] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b9380d6-7027-49b7-abc3-c62c89a29c6f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "174c806e-c2e8-4064-8800-d4a35c19f5e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.809s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.180337] env[65503]: WARNING openstack [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 652.180845] env[65503]: WARNING openstack [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 652.273039] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449535, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.224039} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.273272] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 652.274285] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415dde1f-5607-47ea-8d27-7b0e8924d6b7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.304207] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] d8d917c1-224c-4773-a911-d09f3f719e1b/d8d917c1-224c-4773-a911-d09f3f719e1b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 652.304592] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cc48522-d4aa-4460-89ee-724c84c791e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.329183] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for the task: (returnval){ [ 652.329183] env[65503]: value = "task-4449536" [ 652.329183] env[65503]: _type = "Task" [ 652.329183] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.335347] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Releasing lock "refresh_cache-16d508f9-72f0-4853-92fb-c8c7a37b5668" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.335347] env[65503]: DEBUG nova.compute.manager [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Instance network_info: |[{"id": "24cf9d48-62c6-4756-bdcc-5008383a037b", "address": "fa:16:3e:35:d4:3c", "network": {"id": "a17078ee-d882-4b40-b077-5517204ce959", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1994350859-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "85db5c8415744defa437bd02a64803e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24cf9d48-62", "ovs_interfaceid": "24cf9d48-62c6-4756-bdcc-5008383a037b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 652.335715] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:d4:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1316f5aa-529f-4bac-8dd7-6076a9d43312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24cf9d48-62c6-4756-bdcc-5008383a037b', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.343661] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Creating folder: Project (85db5c8415744defa437bd02a64803e4). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.344533] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a28e1098-bb33-4c21-acb7-9d2b89b20ba6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.354799] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449536, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.357104] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Created folder: Project (85db5c8415744defa437bd02a64803e4) in parent group-v870190. [ 652.357417] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Creating folder: Instances. Parent ref: group-v870252. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.357866] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-088c29c9-9969-4a18-a80e-f418f444fc41 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.373800] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Created folder: Instances in parent group-v870252. [ 652.374095] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 652.374335] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 652.374566] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c57dff6a-7774-4bf9-bd1a-d29c0d7d11b8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.404943] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.404943] env[65503]: value = "task-4449539" [ 652.404943] env[65503]: _type = "Task" [ 652.404943] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.419641] env[65503]: DEBUG nova.compute.utils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 652.421345] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449539, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.421955] env[65503]: DEBUG nova.compute.manager [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Not allocating networking since 'none' was specified. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 652.648212] env[65503]: WARNING neutronclient.v2_0.client [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 652.650012] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 652.650660] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 652.683825] env[65503]: WARNING openstack [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 652.684509] env[65503]: WARNING openstack [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 652.705443] env[65503]: DEBUG nova.network.neutron [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Successfully updated port: 0349cd73-74a3-45ee-9582-091d2fe091f9 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 652.753836] env[65503]: WARNING neutronclient.v2_0.client [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 652.754663] env[65503]: WARNING openstack [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 652.755188] env[65503]: WARNING openstack [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 652.846306] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449536, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.895244] env[65503]: DEBUG nova.network.neutron [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Updated VIF entry in instance network info cache for port ea1023c2-b650-4073-a973-29291b753f53. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 652.896683] env[65503]: DEBUG nova.network.neutron [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Updating instance_info_cache with network_info: [{"id": "ea1023c2-b650-4073-a973-29291b753f53", "address": "fa:16:3e:dc:11:e9", "network": {"id": "f74e7fa9-c4c6-43e3-bb39-f2f843b914b5", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1457325106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c024f22a228f4d2faa4b9316ca53a1ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea1023c2-b6", "ovs_interfaceid": "ea1023c2-b650-4073-a973-29291b753f53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 652.923315] env[65503]: DEBUG nova.compute.manager [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 652.926909] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449539, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.935044] env[65503]: WARNING neutronclient.v2_0.client [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 652.935044] env[65503]: WARNING openstack [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 652.935044] env[65503]: WARNING openstack [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 652.995373] env[65503]: DEBUG nova.network.neutron [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Updating instance_info_cache with network_info: [{"id": "b9ca30d7-8bbd-483b-bc06-14bf5c43111c", "address": "fa:16:3e:01:91:2d", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.213", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9ca30d7-8b", "ovs_interfaceid": "b9ca30d7-8bbd-483b-bc06-14bf5c43111c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 653.005561] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a1e860-25a0-483d-b34c-d5f6c071f1c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.016046] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528b5108-442d-4db0-8aab-ab4525e26fcb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.024867] env[65503]: DEBUG 
oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquiring lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.024960] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.059841] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a45e5bc-6551-4328-98d7-a53026723480 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.070424] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bda614d-22c6-4b34-93b4-afea706fb247 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.084181] env[65503]: DEBUG nova.compute.provider_tree [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.163213] env[65503]: DEBUG nova.network.neutron [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Updated VIF entry in instance network info cache for port 831f4b30-3858-4674-b6f1-f7f00241ca20. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 653.163620] env[65503]: DEBUG nova.network.neutron [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Updating instance_info_cache with network_info: [{"id": "831f4b30-3858-4674-b6f1-f7f00241ca20", "address": "fa:16:3e:8f:df:6e", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap831f4b30-38", "ovs_interfaceid": "831f4b30-3858-4674-b6f1-f7f00241ca20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 653.208019] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "refresh_cache-9dbaff4f-ab02-481b-b51f-b134021d277c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.208259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "refresh_cache-9dbaff4f-ab02-481b-b51f-b134021d277c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.208391] env[65503]: DEBUG nova.network.neutron [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 653.342785] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449536, 'name': ReconfigVM_Task, 'duration_secs': 0.653759} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.343248] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Reconfigured VM instance instance-00000013 to attach disk [datastore1] d8d917c1-224c-4773-a911-d09f3f719e1b/d8d917c1-224c-4773-a911-d09f3f719e1b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 653.343997] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c805b3ff-c602-4c98-be79-eb3ddbcbeb17 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.353935] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for the task: (returnval){ [ 653.353935] env[65503]: value = "task-4449541" [ 653.353935] env[65503]: _type = "Task" [ 653.353935] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.366977] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449541, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.398993] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Releasing lock "refresh_cache-9297d849-a966-48da-ba6a-453c42b99e44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.399318] env[65503]: DEBUG nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Received event network-vif-plugged-88cbb4cd-7967-47a9-9cda-a3456020aefa {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 653.399511] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Acquiring lock "85d0ed1d-6306-4999-832b-f4e69233fec7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.399723] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Lock "85d0ed1d-6306-4999-832b-f4e69233fec7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.399869] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Lock "85d0ed1d-6306-4999-832b-f4e69233fec7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.400055] env[65503]: DEBUG nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] No waiting events found dispatching network-vif-plugged-88cbb4cd-7967-47a9-9cda-a3456020aefa {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 653.400200] env[65503]: WARNING nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Received unexpected event network-vif-plugged-88cbb4cd-7967-47a9-9cda-a3456020aefa for instance with vm_state active and task_state None. [ 653.400359] env[65503]: DEBUG nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Received event network-changed-88cbb4cd-7967-47a9-9cda-a3456020aefa {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 653.400508] env[65503]: DEBUG nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Refreshing instance network info cache due to event network-changed-88cbb4cd-7967-47a9-9cda-a3456020aefa. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 653.400802] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Acquiring lock "refresh_cache-85d0ed1d-6306-4999-832b-f4e69233fec7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.401614] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Acquired lock "refresh_cache-85d0ed1d-6306-4999-832b-f4e69233fec7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.401614] env[65503]: DEBUG nova.network.neutron [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Refreshing network info cache for port 88cbb4cd-7967-47a9-9cda-a3456020aefa {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 653.422385] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449539, 'name': CreateVM_Task, 'duration_secs': 0.599939} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.422789] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 653.423364] env[65503]: WARNING neutronclient.v2_0.client [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 653.425191] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.425511] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.426044] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 653.426938] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bedd4d6-bca9-439a-a6b2-b9e51386ccef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.444441] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for the task: (returnval){ [ 653.444441] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521ded91-f116-2ae4-0159-3735733c1c0f" [ 653.444441] env[65503]: _type = "Task" [ 653.444441] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.455618] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521ded91-f116-2ae4-0159-3735733c1c0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.467241] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "39266117-e82e-48ae-932a-be04b1a7351a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.467241] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "39266117-e82e-48ae-932a-be04b1a7351a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.467241] env[65503]: DEBUG nova.compute.manager [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Going to confirm migration 1 {{(pid=65503) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 653.502795] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Releasing lock "refresh_cache-24e054d7-7662-47ef-8f69-4738c5ff9548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.504670] env[65503]: DEBUG nova.compute.manager [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Instance network_info: |[{"id": "b9ca30d7-8bbd-483b-bc06-14bf5c43111c", "address": "fa:16:3e:01:91:2d", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.213", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9ca30d7-8b", "ovs_interfaceid": "b9ca30d7-8bbd-483b-bc06-14bf5c43111c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 653.505488] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:01:91:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9ca30d7-8bbd-483b-bc06-14bf5c43111c', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.515255] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 653.515255] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 653.515255] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09b47cc3-8ca0-4b52-84db-fc12166b2d9f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.530980] env[65503]: DEBUG nova.compute.manager [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 653.540883] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.540883] env[65503]: value = "task-4449542" [ 653.540883] env[65503]: _type = "Task" [ 653.540883] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.553781] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449542, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.587703] env[65503]: DEBUG nova.scheduler.client.report [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 653.667663] env[65503]: DEBUG oslo_concurrency.lockutils [req-c2078bac-4401-42ce-9b2d-30c209c196e1 req-fca82ca3-acaf-4b79-a4f7-7cc2e1d0bebc service nova] Releasing lock "refresh_cache-174c806e-c2e8-4064-8800-d4a35c19f5e6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.717348] env[65503]: WARNING openstack [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 653.717660] env[65503]: WARNING openstack [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 653.867139] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449541, 'name': Rename_Task, 'duration_secs': 0.277013} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.867787] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 653.868138] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0fac4d4b-7482-4950-8edc-c1a037480807 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.878260] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for the task: (returnval){ [ 653.878260] env[65503]: value = "task-4449543" [ 653.878260] env[65503]: _type = "Task" [ 653.878260] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.891230] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449543, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.904960] env[65503]: WARNING neutronclient.v2_0.client [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 653.905655] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 653.906030] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 653.943106] env[65503]: DEBUG nova.compute.manager [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 653.959398] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521ded91-f116-2ae4-0159-3735733c1c0f, 'name': SearchDatastore_Task, 'duration_secs': 0.020634} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.960039] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.960435] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.960686] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.960885] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.961113] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.961360] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d998a5c5-f26a-4d03-af77-4d6d3c31125b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.980485] env[65503]: DEBUG nova.virt.hardware [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:571}} [ 653.980775] env[65503]: DEBUG nova.virt.hardware [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 653.980953] env[65503]: DEBUG nova.virt.hardware [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 653.981107] env[65503]: DEBUG nova.virt.hardware [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 653.981249] env[65503]: DEBUG nova.virt.hardware [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 653.981424] env[65503]: DEBUG nova.virt.hardware [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 653.982105] env[65503]: DEBUG nova.virt.hardware [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 653.982105] env[65503]: DEBUG nova.virt.hardware [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 653.982105] env[65503]: DEBUG nova.virt.hardware [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 653.982105] env[65503]: DEBUG nova.virt.hardware [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 653.982375] env[65503]: DEBUG nova.virt.hardware [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 653.984679] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a7a6db95-abdc-4a7d-a4dd-524a0cfb2fa3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.987518] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.987685] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 653.988992] env[65503]: WARNING neutronclient.v2_0.client [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 653.991146] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c52ff6e8-b1c8-4640-9ab2-13a8a3681506 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.695564] env[65503]: DEBUG nova.network.neutron [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 654.703661] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.790s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.714923] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.623s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.716707] env[65503]: INFO nova.compute.claims [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 654.724967] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f607d9-0b67-43b2-a9c8-68d65da6bac2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.733375] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for the task: (returnval){ [ 654.733375] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210bfc4-8bc6-6d94-7336-2d92d72bee90" [ 654.733375] env[65503]: _type = "Task" [ 654.733375] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.735066] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.743474] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449542, 'name': CreateVM_Task, 'duration_secs': 0.415789} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.745291] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 654.746219] env[65503]: WARNING neutronclient.v2_0.client [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 654.746612] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.746759] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.747088] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 654.747760] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80bbf9e0-fce6-48f8-8880-819f26fc2601 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.756314] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210bfc4-8bc6-6d94-7336-2d92d72bee90, 'name': SearchDatastore_Task, 'duration_secs': 0.017119} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.756314] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449543, 'name': PowerOnVM_Task} progress is 1%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.765740] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 654.770756] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Creating folder: Project (f708e2e3c20f450f9659d2b94c06659d). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 654.772991] env[65503]: INFO nova.scheduler.client.report [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Deleted allocations for instance 2dbc3860-c65c-4cbb-8d90-f1f74420e652 [ 654.774035] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b9c3dc8-c2ec-46bb-a533-e35d9e768dbd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.776854] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f2a84960-9d88-42c1-a3bf-7fdeb71b4992 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.779045] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 654.779045] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52108cd6-0e7d-baf7-ef29-c8da6116532f" [ 654.779045] env[65503]: _type = "Task" [ 654.779045] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.786420] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for the task: (returnval){ [ 654.786420] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525aa67e-1ff9-1d24-2726-b47faf52172d" [ 654.786420] env[65503]: _type = "Task" [ 654.786420] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.795529] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52108cd6-0e7d-baf7-ef29-c8da6116532f, 'name': SearchDatastore_Task, 'duration_secs': 0.016382} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.797091] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Created folder: Project (f708e2e3c20f450f9659d2b94c06659d) in parent group-v870190. [ 654.797091] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Creating folder: Instances. Parent ref: group-v870256. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 654.797091] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.797091] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.797091] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.797326] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7cb57288-1e64-4337-8cc3-b04ad106ab94 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.802783] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525aa67e-1ff9-1d24-2726-b47faf52172d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.812553] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Created folder: Instances in parent group-v870256. [ 654.812553] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 654.812824] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 654.812824] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e0ef86e-5ee6-47d6-a0b3-ad181e10f49f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.832110] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 654.832110] env[65503]: value = "task-4449546" [ 654.832110] env[65503]: _type = "Task" [ 654.832110] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.841336] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449546, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.072532] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Received event network-vif-plugged-0d0ad262-a7c3-46b2-962a-ae6db1e6279d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 655.072869] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Acquiring lock "d8d917c1-224c-4773-a911-d09f3f719e1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.073125] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Lock "d8d917c1-224c-4773-a911-d09f3f719e1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.073302] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Lock "d8d917c1-224c-4773-a911-d09f3f719e1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.073501] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] No waiting events found dispatching network-vif-plugged-0d0ad262-a7c3-46b2-962a-ae6db1e6279d {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 655.074076] env[65503]: WARNING nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Received unexpected event network-vif-plugged-0d0ad262-a7c3-46b2-962a-ae6db1e6279d for instance with vm_state building and task_state spawning. [ 655.074253] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Received event network-changed-0d0ad262-a7c3-46b2-962a-ae6db1e6279d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 655.074461] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Refreshing instance network info cache due to event network-changed-0d0ad262-a7c3-46b2-962a-ae6db1e6279d. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 655.074868] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Acquiring lock "refresh_cache-d8d917c1-224c-4773-a911-d09f3f719e1b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.075112] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Acquired lock "refresh_cache-d8d917c1-224c-4773-a911-d09f3f719e1b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.075179] env[65503]: DEBUG nova.network.neutron [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Refreshing network info cache for port 0d0ad262-a7c3-46b2-962a-ae6db1e6279d {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 655.118687] env[65503]: WARNING neutronclient.v2_0.client [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 655.118687] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.118937] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.118937] env[65503]: DEBUG nova.network.neutron [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 655.119143] env[65503]: DEBUG nova.objects.instance [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lazy-loading 'info_cache' on Instance uuid 39266117-e82e-48ae-932a-be04b1a7351a {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 655.175519] env[65503]: WARNING openstack [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 655.175971] env[65503]: WARNING openstack [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] 
Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 655.219236] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449543, 'name': PowerOnVM_Task} progress is 64%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.224931] env[65503]: DEBUG nova.compute.manager [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 655.228797] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221c8b4f-3e67-4f9b-8b4e-1f4ad44ed77d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.240508] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 655.240932] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 655.292467] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a4d5c52-7b54-4896-b7ad-101805209393 tempest-TenantUsagesTestJSON-1417297331 tempest-TenantUsagesTestJSON-1417297331-project-member] Lock "2dbc3860-c65c-4cbb-8d90-f1f74420e652" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.974s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.303188] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525aa67e-1ff9-1d24-2726-b47faf52172d, 'name': SearchDatastore_Task, 'duration_secs': 0.016068} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.307183] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.307183] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 16d508f9-72f0-4853-92fb-c8c7a37b5668/16d508f9-72f0-4853-92fb-c8c7a37b5668.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 655.307183] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.307183] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 655.307391] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-54481dd5-6a76-483b-bb72-900cf9422343 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.308268] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b53613f5-eead-4dd7-b2ff-11fd28f80c29 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.321160] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for the task: (returnval){ [ 655.321160] env[65503]: value = "task-4449548" [ 655.321160] env[65503]: _type = "Task" [ 655.321160] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.326607] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 655.326801] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 655.333130] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73289dbf-5abe-4422-840f-05cacded8018 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.344289] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449548, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.346587] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 655.346587] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524ba3ea-96c8-f536-7aec-d798e7fe607d" [ 655.346587] env[65503]: _type = "Task" [ 655.346587] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.357933] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449546, 'name': CreateVM_Task, 'duration_secs': 0.365009} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.357933] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 655.359669] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.359669] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.359669] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 655.359669] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfe4c70f-52ec-45de-bdd1-ee169d58dade {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.365509] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': 
session[52ca68d6-9baf-b27a-a70d-300f2615599e]524ba3ea-96c8-f536-7aec-d798e7fe607d, 'name': SearchDatastore_Task, 'duration_secs': 0.011432} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.366806] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0636973-9886-4973-a67f-8c7fcd520d44 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.370759] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for the task: (returnval){ [ 655.370759] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5235ca3a-3312-6288-24b8-8f2541e77e20" [ 655.370759] env[65503]: _type = "Task" [ 655.370759] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.376498] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 655.376498] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f36521-72ce-1bc1-fa2b-21fb62dc6797" [ 655.376498] env[65503]: _type = "Task" [ 655.376498] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.385332] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5235ca3a-3312-6288-24b8-8f2541e77e20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.386858] env[65503]: WARNING neutronclient.v2_0.client [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 655.387674] env[65503]: WARNING openstack [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 655.388578] env[65503]: WARNING openstack [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 655.403922] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f36521-72ce-1bc1-fa2b-21fb62dc6797, 'name': SearchDatastore_Task, 'duration_secs': 0.019711} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.404204] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.404455] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 24e054d7-7662-47ef-8f69-4738c5ff9548/24e054d7-7662-47ef-8f69-4738c5ff9548.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 655.404719] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2bfe42d3-a9f6-4fb6-86f5-feee5125b74c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.413215] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 655.413215] env[65503]: value = "task-4449549" [ 655.413215] env[65503]: _type = "Task" [ 655.413215] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.422432] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449549, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.427624] env[65503]: WARNING neutronclient.v2_0.client [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 655.428708] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 655.429387] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 655.545065] env[65503]: DEBUG nova.network.neutron [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Updating instance_info_cache with network_info: [{"id": "0349cd73-74a3-45ee-9582-091d2fe091f9", "address": "fa:16:3e:df:18:52", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0349cd73-74", "ovs_interfaceid": "0349cd73-74a3-45ee-9582-091d2fe091f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 655.580745] env[65503]: WARNING neutronclient.v2_0.client [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 655.581728] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 655.582177] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 655.631227] env[65503]: DEBUG nova.network.neutron [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Updated VIF entry in instance network info cache for port 88cbb4cd-7967-47a9-9cda-a3456020aefa. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 655.631227] env[65503]: DEBUG nova.network.neutron [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Updating instance_info_cache with network_info: [{"id": "88cbb4cd-7967-47a9-9cda-a3456020aefa", "address": "fa:16:3e:81:94:8d", "network": {"id": "668794e6-8271-493d-9479-e10ebcbc299b", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2113111945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a840b7f7e86745d79af6e9e1196e20fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "05b1253d-2b87-4158-9ff1-dafcf829f11f", "external-id": "nsx-vlan-transportzone-55", "segmentation_id": 55, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88cbb4cd-79", "ovs_interfaceid": "88cbb4cd-7967-47a9-9cda-a3456020aefa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 655.723539] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449543, 'name': PowerOnVM_Task} progress is 82%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.751336] env[65503]: INFO nova.compute.manager [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] instance snapshotting [ 655.758298] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f5bb99-f29a-4814-95db-14a6676884b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.793750] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b219901-1c01-4b4c-9c39-8a4ff4b385b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.839359] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449548, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.885134] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5235ca3a-3312-6288-24b8-8f2541e77e20, 'name': SearchDatastore_Task, 'duration_secs': 0.015482} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.885635] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.885908] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 655.886312] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.886417] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.886503] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 655.886804] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d09c5815-fbcc-4325-a9db-7dd3c4dc3b13 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.899960] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 655.900199] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 655.901197] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83a7cee9-1cae-4359-8f01-cf36a60fe5e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.911882] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for the task: (returnval){ [ 655.911882] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528a53bd-d42c-d84e-1a44-9d057fc5b774" [ 655.911882] env[65503]: _type = "Task" [ 655.911882] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.931150] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449549, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.933849] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528a53bd-d42c-d84e-1a44-9d057fc5b774, 'name': SearchDatastore_Task, 'duration_secs': 0.012491} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.938180] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-425cf2c1-aa4f-4a25-92c8-6eea7eb131d1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.947103] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for the task: (returnval){ [ 655.947103] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52508ef9-f26c-f867-24e3-227d074dff9c" [ 655.947103] env[65503]: _type = "Task" [ 655.947103] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.957643] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52508ef9-f26c-f867-24e3-227d074dff9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.051151] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "refresh_cache-9dbaff4f-ab02-481b-b51f-b134021d277c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.051684] env[65503]: DEBUG nova.compute.manager [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Instance network_info: |[{"id": "0349cd73-74a3-45ee-9582-091d2fe091f9", "address": "fa:16:3e:df:18:52", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0349cd73-74", "ovs_interfaceid": "0349cd73-74a3-45ee-9582-091d2fe091f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 656.053149] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:df:18:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaba65c3-6925-4c7f-83b6-17cd1a328e27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0349cd73-74a3-45ee-9582-091d2fe091f9', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.060744] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 656.061070] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 656.061365] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee310644-d87e-4393-aa02-c39470beb41c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.090460] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.090460] env[65503]: value = "task-4449550" [ 656.090460] env[65503]: _type = "Task" [ 656.090460] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.101439] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquiring lock "50f11559-b8c7-41a2-aa43-255a28ffa58c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.101790] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lock "50f11559-b8c7-41a2-aa43-255a28ffa58c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.101943] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquiring lock "50f11559-b8c7-41a2-aa43-255a28ffa58c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.103710] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lock "50f11559-b8c7-41a2-aa43-255a28ffa58c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.103710] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 
tempest-ServersAdmin275Test-241681051-project-member] Lock "50f11559-b8c7-41a2-aa43-255a28ffa58c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.104107] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449550, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.104986] env[65503]: INFO nova.compute.manager [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Terminating instance [ 656.134976] env[65503]: WARNING neutronclient.v2_0.client [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 656.135160] env[65503]: WARNING openstack [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 656.135628] env[65503]: WARNING openstack [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 656.145398] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Releasing lock "refresh_cache-85d0ed1d-6306-4999-832b-f4e69233fec7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.145398] env[65503]: DEBUG nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Received event network-changed-939876df-2b3d-4723-8926-1187feb4fe37 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 656.145572] env[65503]: DEBUG nova.compute.manager [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Refreshing instance network info cache due to event network-changed-939876df-2b3d-4723-8926-1187feb4fe37. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 656.145825] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Acquiring lock "refresh_cache-a1908e71-31f9-4308-b4d6-7908d3208c5a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.145989] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Acquired lock "refresh_cache-a1908e71-31f9-4308-b4d6-7908d3208c5a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.146186] env[65503]: DEBUG nova.network.neutron [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Refreshing network info cache for port 939876df-2b3d-4723-8926-1187feb4fe37 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 656.188078] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquiring lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.188381] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.222483] env[65503]: DEBUG oslo_vmware.api [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449543, 'name': PowerOnVM_Task, 'duration_secs': 2.01778} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.222794] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 656.223017] env[65503]: INFO nova.compute.manager [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Took 13.98 seconds to spawn the instance on the hypervisor. 
[ 656.223184] env[65503]: DEBUG nova.compute.manager [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 656.224120] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aed5779-fa9e-4fd8-986c-765746372b89 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.320343] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 656.323699] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f2e0e08e-eacf-4707-a53d-7402acd9f5ab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.336655] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 656.336655] env[65503]: value = "task-4449551" [ 656.336655] env[65503]: _type = "Task" [ 656.336655] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.347744] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449548, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582569} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.349321] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 16d508f9-72f0-4853-92fb-c8c7a37b5668/16d508f9-72f0-4853-92fb-c8c7a37b5668.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 656.349976] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.350641] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1662e6c2-70a8-489e-b53f-45b1cc65f2ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.356816] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf56e93a-7d9a-41af-817e-05b04a3c95b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.359808] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449551, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.366182] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4385f3f-e9f5-4496-ace8-e1e1ca3d9a74 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.375177] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for the task: (returnval){ [ 656.375177] env[65503]: value = "task-4449552" [ 656.375177] env[65503]: _type = "Task" [ 656.375177] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.422469] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f1ced8-65be-4dc8-aee4-558b1cbcc8e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.434891] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449552, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.443307] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449549, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.843113} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.444559] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 24e054d7-7662-47ef-8f69-4738c5ff9548/24e054d7-7662-47ef-8f69-4738c5ff9548.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 656.444888] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.445370] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a499059b-0149-42fd-8400-cb4883af1d20 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.448798] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3dae57-5c24-4685-8c7f-e9559237f018 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.458518] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 656.458942] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 656.478665] env[65503]: DEBUG nova.compute.provider_tree [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 656.480459] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 656.480459] env[65503]: value = "task-4449553" [ 656.480459] env[65503]: _type = "Task" [ 656.480459] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.488677] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52508ef9-f26c-f867-24e3-227d074dff9c, 'name': SearchDatastore_Task, 'duration_secs': 0.071779} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.501242] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.501419] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] ab09cfe5-8257-462b-9ebf-87081d5793ac/ab09cfe5-8257-462b-9ebf-87081d5793ac.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 656.513612] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b170c68-a882-4062-b35b-015486fdfd55 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.524243] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449553, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.525725] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for the task: (returnval){ [ 656.525725] env[65503]: value = "task-4449554" [ 656.525725] env[65503]: _type = "Task" [ 656.525725] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.541372] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449554, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.608781] env[65503]: WARNING openstack [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 656.609012] env[65503]: WARNING openstack [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 656.619727] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquiring lock "refresh_cache-50f11559-b8c7-41a2-aa43-255a28ffa58c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.619727] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquired lock "refresh_cache-50f11559-b8c7-41a2-aa43-255a28ffa58c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.619727] env[65503]: DEBUG nova.network.neutron [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 656.627478] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449550, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.653554] env[65503]: WARNING neutronclient.v2_0.client [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 656.654640] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 656.655724] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 656.667629] env[65503]: WARNING neutronclient.v2_0.client [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 656.668329] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 656.669408] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 656.691767] env[65503]: DEBUG nova.compute.manager [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 656.751655] env[65503]: INFO nova.compute.manager [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Took 31.30 seconds to build instance. [ 656.792538] env[65503]: WARNING neutronclient.v2_0.client [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 656.793300] env[65503]: WARNING openstack [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 656.793649] env[65503]: WARNING openstack [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 656.853349] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449551, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.863415] env[65503]: DEBUG nova.network.neutron [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Updated VIF entry in instance network info cache for port 0d0ad262-a7c3-46b2-962a-ae6db1e6279d. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 656.863771] env[65503]: DEBUG nova.network.neutron [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Updating instance_info_cache with network_info: [{"id": "0d0ad262-a7c3-46b2-962a-ae6db1e6279d", "address": "fa:16:3e:6d:c2:52", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d0ad262-a7", "ovs_interfaceid": "0d0ad262-a7c3-46b2-962a-ae6db1e6279d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 656.886345] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449552, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.178619} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.886345] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.886345] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0cc832-3469-4e9a-8f16-1d4e4487c853 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.914747] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 16d508f9-72f0-4853-92fb-c8c7a37b5668/16d508f9-72f0-4853-92fb-c8c7a37b5668.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.922694] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b493611-81cc-492c-a02f-9661570eb70a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.943361] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 656.943774] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 656.961023] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for the task: (returnval){ [ 656.961023] env[65503]: value = "task-4449555" [ 656.961023] env[65503]: _type = "Task" [ 656.961023] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.970800] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449555, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.017450] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449553, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079229} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.017450] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "174c806e-c2e8-4064-8800-d4a35c19f5e6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.017777] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "174c806e-c2e8-4064-8800-d4a35c19f5e6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.017777] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "174c806e-c2e8-4064-8800-d4a35c19f5e6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.018013] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "174c806e-c2e8-4064-8800-d4a35c19f5e6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.018242] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "174c806e-c2e8-4064-8800-d4a35c19f5e6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.020251] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 657.023595] env[65503]: DEBUG nova.network.neutron [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance_info_cache with network_info: [{"id": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "address": "fa:16:3e:34:a3:37", 
"network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03697e78-59", "ovs_interfaceid": "03697e78-5935-45aa-a1d5-1bf8701e3f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 657.025615] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630f55a4-e2d6-4d36-8363-805e7d2e66b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.031009] env[65503]: ERROR nova.scheduler.client.report [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [req-ddba63c4-d3d4-4c18-8a19-b1d42cb1f3f4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ddba63c4-d3d4-4c18-8a19-b1d42cb1f3f4"}]} [ 657.031953] env[65503]: INFO nova.compute.manager [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Terminating instance [ 657.074429] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 24e054d7-7662-47ef-8f69-4738c5ff9548/24e054d7-7662-47ef-8f69-4738c5ff9548.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 657.079668] env[65503]: DEBUG nova.scheduler.client.report [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 657.083747] env[65503]: WARNING neutronclient.v2_0.client [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 657.084169] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 657.084486] env[65503]: WARNING openstack [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 657.094015] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2aa85dc-1a61-4c95-a6c9-e600d885be07 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.111489] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449554, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.113134] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquiring lock "interface-85d0ed1d-6306-4999-832b-f4e69233fec7-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.113426] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "interface-85d0ed1d-6306-4999-832b-f4e69233fec7-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.113755] env[65503]: DEBUG nova.objects.instance [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lazy-loading 'flavor' on Instance uuid 85d0ed1d-6306-4999-832b-f4e69233fec7 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 657.123754] env[65503]: WARNING neutronclient.v2_0.client [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 657.124580] env[65503]: WARNING openstack [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 657.125121] env[65503]: WARNING openstack [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 657.140492] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449550, 'name': CreateVM_Task, 'duration_secs': 0.663971} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.140956] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 657.140956] env[65503]: value = "task-4449556" [ 657.140956] env[65503]: _type = "Task" [ 657.140956] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.142191] env[65503]: DEBUG nova.scheduler.client.report [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 657.142486] env[65503]: DEBUG nova.compute.provider_tree [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 657.145490] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 657.147330] env[65503]: WARNING neutronclient.v2_0.client [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 657.147955] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.148170] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.148672] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 657.153995] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e82a0ec-e308-4fca-ab81-0748bced5cd1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.165032] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.168117] env[65503]: DEBUG nova.network.neutron [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 657.170480] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 657.170480] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520c00bb-209f-2805-0623-c8a0071a35f6" [ 657.170480] env[65503]: _type = "Task" [ 657.170480] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.171686] env[65503]: DEBUG nova.scheduler.client.report [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 657.192400] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520c00bb-209f-2805-0623-c8a0071a35f6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.201571] env[65503]: DEBUG nova.scheduler.client.report [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 657.227528] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.239253] env[65503]: DEBUG nova.network.neutron [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Updated VIF entry in instance network info cache for port 939876df-2b3d-4723-8926-1187feb4fe37. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 657.239745] env[65503]: DEBUG nova.network.neutron [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Updating instance_info_cache with network_info: [{"id": "939876df-2b3d-4723-8926-1187feb4fe37", "address": "fa:16:3e:ac:ad:63", "network": {"id": "8a095ab6-7707-4d88-81ca-9d7588aec7de", "bridge": "br-int", "label": "tempest-ServersTestJSON-408175709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f55d9e8eeb34665aaa465057871d687", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26a1e556-1ede-445a-bf87-a35daa5d9070", "external-id": "nsx-vlan-transportzone-396", "segmentation_id": 396, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap939876df-2b", "ovs_interfaceid": "939876df-2b3d-4723-8926-1187feb4fe37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 657.254984] env[65503]: DEBUG oslo_concurrency.lockutils [None req-961c5786-da2e-4374-a339-0e1fecfc44c0 tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Lock "d8d917c1-224c-4773-a911-d09f3f719e1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.813s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.288569] 
env[65503]: DEBUG nova.network.neutron [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 657.354114] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449551, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.366880] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Releasing lock "refresh_cache-d8d917c1-224c-4773-a911-d09f3f719e1b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.367516] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Received event network-vif-plugged-24cf9d48-62c6-4756-bdcc-5008383a037b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 657.367770] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Acquiring lock "16d508f9-72f0-4853-92fb-c8c7a37b5668-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.368098] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Lock "16d508f9-72f0-4853-92fb-c8c7a37b5668-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.368262] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Lock "16d508f9-72f0-4853-92fb-c8c7a37b5668-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.368422] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] No waiting events found dispatching network-vif-plugged-24cf9d48-62c6-4756-bdcc-5008383a037b {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 657.368714] env[65503]: WARNING nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Received unexpected event network-vif-plugged-24cf9d48-62c6-4756-bdcc-5008383a037b for instance with vm_state building and task_state spawning. 
[ 657.368977] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Received event network-changed-24cf9d48-62c6-4756-bdcc-5008383a037b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 657.369163] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Refreshing instance network info cache due to event network-changed-24cf9d48-62c6-4756-bdcc-5008383a037b. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 657.369344] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Acquiring lock "refresh_cache-16d508f9-72f0-4853-92fb-c8c7a37b5668" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.369548] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Acquired lock "refresh_cache-16d508f9-72f0-4853-92fb-c8c7a37b5668" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.369621] env[65503]: DEBUG nova.network.neutron [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Refreshing network info cache for port 24cf9d48-62c6-4756-bdcc-5008383a037b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 657.472297] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449555, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.540133] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "refresh_cache-39266117-e82e-48ae-932a-be04b1a7351a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.540133] env[65503]: DEBUG nova.objects.instance [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lazy-loading 'migration_context' on Instance uuid 39266117-e82e-48ae-932a-be04b1a7351a {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 657.540133] env[65503]: DEBUG nova.compute.manager [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 657.540367] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 657.549861] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7579e2a5-2f46-486e-8543-481df1499c1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.560978] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 657.564283] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9ceec4a-6c4b-4357-ab20-b0a41ea4cc46 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.566058] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449554, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.866671} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.566688] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] ab09cfe5-8257-462b-9ebf-87081d5793ac/ab09cfe5-8257-462b-9ebf-87081d5793ac.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 657.566904] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 657.567816] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8eb40b47-5eb1-410e-b8ee-9315e11934e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.576396] env[65503]: DEBUG oslo_vmware.api [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 657.576396] env[65503]: value = "task-4449558" [ 657.576396] env[65503]: _type = "Task" [ 657.576396] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.581523] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for the task: (returnval){ [ 657.581523] env[65503]: value = "task-4449559" [ 657.581523] env[65503]: _type = "Task" [ 657.581523] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.594226] env[65503]: DEBUG oslo_vmware.api [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449558, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.597384] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449559, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.623865] env[65503]: WARNING neutronclient.v2_0.client [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 657.625046] env[65503]: WARNING openstack [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 657.626091] env[65503]: WARNING openstack [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 657.632547] env[65503]: DEBUG nova.objects.instance [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lazy-loading 'pci_requests' on Instance uuid 85d0ed1d-6306-4999-832b-f4e69233fec7 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 657.667500] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449556, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.685378] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520c00bb-209f-2805-0623-c8a0071a35f6, 'name': SearchDatastore_Task, 'duration_secs': 0.053089} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.685878] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.686224] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 657.686224] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.686383] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.686575] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 657.690508] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dae58362-3517-49cd-8e14-b9ebaf15fa9d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.704352] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 657.704665] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 657.705593] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2aa661f1-bf92-4d6a-a62b-19cceb946277 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.717102] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 657.717102] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206d330-9625-f619-dd05-0e9187fcc093" [ 657.717102] env[65503]: _type = "Task" [ 657.717102] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.726879] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206d330-9625-f619-dd05-0e9187fcc093, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.743852] env[65503]: DEBUG oslo_concurrency.lockutils [req-f816913d-15c2-40db-8820-84050e246b0d req-9a65c492-f86c-40a6-badb-81aa5b8ec98c service nova] Releasing lock "refresh_cache-a1908e71-31f9-4308-b4d6-7908d3208c5a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.746454] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57077691-894d-4f95-8983-9d1fc2311028 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.756344] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf115f2-48f6-4374-9856-25edf8e40159 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.795938] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Releasing lock "refresh_cache-50f11559-b8c7-41a2-aa43-255a28ffa58c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.796460] env[65503]: DEBUG nova.compute.manager [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 657.796691] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 657.797861] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1f83f5-efcc-4698-8058-93fb62ce3ac2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.800975] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bf00d8-9795-438d-8ead-4adeb68fb268 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.813110] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea92541-95c6-4729-9763-914236ed8888 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.817369] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 657.817683] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77a6f07f-f87c-4ae4-aead-d73fee579968 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.834770] env[65503]: DEBUG nova.compute.provider_tree [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 657.838412] env[65503]: DEBUG oslo_vmware.api [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 657.838412] env[65503]: value = "task-4449560" [ 657.838412] env[65503]: _type = "Task" [ 657.838412] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.858585] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449551, 'name': CreateSnapshot_Task, 'duration_secs': 1.453754} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.858585] env[65503]: DEBUG oslo_vmware.api [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449560, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.859445] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 657.860260] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc34d50-8c22-4412-abb6-f14c33dcefb3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.874366] env[65503]: WARNING neutronclient.v2_0.client [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 657.875289] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 657.875573] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 657.925375] env[65503]: DEBUG nova.compute.manager [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Received event network-changed-ea1023c2-b650-4073-a973-29291b753f53 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 657.926649] env[65503]: DEBUG nova.compute.manager [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Refreshing instance network info cache due to event network-changed-ea1023c2-b650-4073-a973-29291b753f53. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 657.926649] env[65503]: DEBUG oslo_concurrency.lockutils [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] Acquiring lock "refresh_cache-9297d849-a966-48da-ba6a-453c42b99e44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.926649] env[65503]: DEBUG oslo_concurrency.lockutils [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] Acquired lock "refresh_cache-9297d849-a966-48da-ba6a-453c42b99e44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.926649] env[65503]: DEBUG nova.network.neutron [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Refreshing network info cache for port ea1023c2-b650-4073-a973-29291b753f53 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 657.977688] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449555, 'name': ReconfigVM_Task, 'duration_secs': 0.685259} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.978126] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 16d508f9-72f0-4853-92fb-c8c7a37b5668/16d508f9-72f0-4853-92fb-c8c7a37b5668.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.979066] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-822c3fad-28ef-48b2-a3b3-99fa82db177c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.990866] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for the task: (returnval){ [ 657.990866] env[65503]: value = "task-4449561" [ 657.990866] env[65503]: _type = "Task" [ 657.990866] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.004406] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449561, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.050828] env[65503]: DEBUG nova.objects.base [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Object Instance<39266117-e82e-48ae-932a-be04b1a7351a> lazy-loaded attributes: info_cache,migration_context {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 658.054034] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a018420d-4bf1-4d67-87a8-a10aecc5324a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.059039] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 658.059433] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 658.097016] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62598c7f-c988-4403-9f04-9cbb4353c66e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.109102] env[65503]: DEBUG oslo_vmware.api [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449558, 'name': PowerOffVM_Task, 'duration_secs': 0.312091} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.109102] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449559, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133825} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.110326] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 658.110543] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 658.110864] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 658.111238] env[65503]: DEBUG oslo_vmware.api [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 658.111238] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f74115-2696-f1d1-d7af-05e92631b78f" [ 658.111238] env[65503]: _type = "Task" [ 658.111238] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.111489] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b8ee223-2444-496b-97d0-d90c43500e65 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.117025] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f346072-24f8-49de-9950-4812c98b8a6c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.149284] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] ab09cfe5-8257-462b-9ebf-87081d5793ac/ab09cfe5-8257-462b-9ebf-87081d5793ac.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 658.149763] env[65503]: DEBUG nova.objects.base [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Object Instance<85d0ed1d-6306-4999-832b-f4e69233fec7> lazy-loaded attributes: flavor,pci_requests {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 658.150019] env[65503]: DEBUG nova.network.neutron [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 658.150248] env[65503]: WARNING neutronclient.v2_0.client 
[None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 658.151203] env[65503]: WARNING neutronclient.v2_0.client [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 658.151203] env[65503]: WARNING openstack [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 658.151911] env[65503]: WARNING openstack [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 658.158137] env[65503]: DEBUG oslo_vmware.api [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f74115-2696-f1d1-d7af-05e92631b78f, 'name': SearchDatastore_Task, 'duration_secs': 0.010342} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.163601] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37a8008a-0c81-4154-9eca-863ccf4e9787 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.178823] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.189206] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449556, 'name': ReconfigVM_Task, 'duration_secs': 0.720186} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.190939] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 24e054d7-7662-47ef-8f69-4738c5ff9548/24e054d7-7662-47ef-8f69-4738c5ff9548.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 658.191705] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for the task: (returnval){ [ 658.191705] env[65503]: value = "task-4449563" [ 658.191705] env[65503]: _type = "Task" [ 658.191705] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.191938] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f7e7b47-fd1f-43be-b478-4e72938b2d52 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.206245] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449563, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.207931] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 658.207931] env[65503]: value = "task-4449564" [ 658.207931] env[65503]: _type = "Task" [ 658.207931] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.216209] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.216388] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.218921] env[65503]: WARNING neutronclient.v2_0.client [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 658.219645] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 658.220058] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 658.245313] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449564, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.253507] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206d330-9625-f619-dd05-0e9187fcc093, 'name': SearchDatastore_Task, 'duration_secs': 0.021481} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.254771] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-686d4bff-5e0f-4d6d-ae0f-78dba6ab7e7b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.266253] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 658.266253] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529889f2-cb00-8a7b-0f54-dd07ef3c2064" [ 658.266253] env[65503]: _type = "Task" [ 658.266253] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.275666] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529889f2-cb00-8a7b-0f54-dd07ef3c2064, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.277148] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 658.277334] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 658.277566] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleting the datastore file [datastore1] 174c806e-c2e8-4064-8800-d4a35c19f5e6 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 658.277926] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79c68600-2459-457a-b876-a094bffdd633 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.285361] env[65503]: DEBUG oslo_vmware.api [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 658.285361] env[65503]: value = "task-4449565" [ 658.285361] env[65503]: _type = "Task" [ 658.285361] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.298541] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fa63dc8c-bd77-433a-bf96-82f133a1f7ab tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "interface-85d0ed1d-6306-4999-832b-f4e69233fec7-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.185s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.304168] env[65503]: DEBUG oslo_vmware.api [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449565, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.351512] env[65503]: DEBUG nova.network.neutron [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Updated VIF entry in instance network info cache for port 24cf9d48-62c6-4756-bdcc-5008383a037b. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 658.351743] env[65503]: DEBUG nova.network.neutron [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Updating instance_info_cache with network_info: [{"id": "24cf9d48-62c6-4756-bdcc-5008383a037b", "address": "fa:16:3e:35:d4:3c", "network": {"id": "a17078ee-d882-4b40-b077-5517204ce959", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1994350859-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "85db5c8415744defa437bd02a64803e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24cf9d48-62", "ovs_interfaceid": "24cf9d48-62c6-4756-bdcc-5008383a037b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 658.357485] env[65503]: DEBUG oslo_vmware.api [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449560, 'name': PowerOffVM_Task, 'duration_secs': 0.133782} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.357485] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 658.357485] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 658.357846] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-565ea601-5768-479a-a4b6-687a664b5d31 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.372578] env[65503]: ERROR nova.scheduler.client.report [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [req-b97aac0c-a494-4bba-b5f8-22b678d687eb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b97aac0c-a494-4bba-b5f8-22b678d687eb"}]} [ 658.391425] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 658.392059] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-efeff82a-40db-434e-b29e-e186d0a19646 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.403962] env[65503]: DEBUG nova.scheduler.client.report [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 658.406535] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 658.406882] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 
tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 658.407091] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Deleting the datastore file [datastore1] 50f11559-b8c7-41a2-aa43-255a28ffa58c {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 658.408595] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f42d6f6-d20e-45b4-bece-58b09b2f18ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.417189] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 658.417189] env[65503]: value = "task-4449567" [ 658.417189] env[65503]: _type = "Task" [ 658.417189] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.423784] env[65503]: DEBUG oslo_vmware.api [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for the task: (returnval){ [ 658.423784] env[65503]: value = "task-4449568" [ 658.423784] env[65503]: _type = "Task" [ 658.423784] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.432890] env[65503]: WARNING neutronclient.v2_0.client [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 658.433198] env[65503]: WARNING openstack [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 658.433776] env[65503]: WARNING openstack [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 658.445649] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449567, 'name': CloneVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.456167] env[65503]: DEBUG oslo_vmware.api [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449568, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.464248] env[65503]: DEBUG nova.scheduler.client.report [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 658.464610] env[65503]: DEBUG nova.compute.provider_tree [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 658.495042] env[65503]: DEBUG nova.scheduler.client.report [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 658.509518] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449561, 'name': Rename_Task, 'duration_secs': 0.273055} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.509923] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 658.510250] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4cdd4c06-f0e6-4e34-b763-6a636781085e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.520543] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for the task: (returnval){ [ 658.520543] env[65503]: value = "task-4449569" [ 658.520543] env[65503]: _type = "Task" [ 658.520543] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.526501] env[65503]: DEBUG nova.scheduler.client.report [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 658.542571] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449569, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.687801] env[65503]: WARNING openstack [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 658.687801] env[65503]: WARNING openstack [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 658.704855] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449563, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.722392] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449564, 'name': Rename_Task, 'duration_secs': 0.239415} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.724992] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 658.725555] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f0e753f-7ad9-4047-bb56-ab0dc5e801bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.731384] env[65503]: DEBUG nova.compute.manager [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 658.739601] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 658.739601] env[65503]: value = "task-4449570" [ 658.739601] env[65503]: _type = "Task" [ 658.739601] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.758867] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449570, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.778217] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529889f2-cb00-8a7b-0f54-dd07ef3c2064, 'name': SearchDatastore_Task, 'duration_secs': 0.027696} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.778574] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.778852] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 9dbaff4f-ab02-481b-b51f-b134021d277c/9dbaff4f-ab02-481b-b51f-b134021d277c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 658.780225] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df299e20-7811-4f5c-b1a7-caa8928e4768 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.792486] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 658.792486] env[65503]: value = "task-4449571" [ 658.792486] env[65503]: _type = "Task" [ 658.792486] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.800404] env[65503]: DEBUG oslo_vmware.api [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449565, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308093} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.801204] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 658.805026] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 658.805026] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 658.805026] env[65503]: INFO nova.compute.manager [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Took 1.26 seconds to destroy the instance on the hypervisor. 
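Note: the "Waiting for the task: (returnval){ ... }" blocks and the "progress is N%" / "completed successfully" entries above are oslo.vmware's task-polling loop. A minimal sketch of that pattern, assuming placeholder vCenter credentials and a datastore path and Datacenter reference obtained elsewhere (none of these values come from this log):

    from oslo_vmware import api

    # Placeholder host and credentials; only the polling pattern is the point.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    def delete_datastore_dir(ds_path, datacenter_ref):
        """Delete a datastore path and block until vCenter finishes the task.

        ds_path is a '[datastore1] some-dir' style path and datacenter_ref a
        Datacenter managed-object reference obtained elsewhere. wait_for_task
        polls the server-side task, logging "progress is N%" as it goes, and
        raises if the task ends in error.
        """
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager,
                                  name=ds_path, datacenter=datacenter_ref)
        return session.wait_for_task(task)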
[ 658.805026] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 658.805026] env[65503]: DEBUG nova.compute.manager [-] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 658.805026] env[65503]: DEBUG nova.network.neutron [-] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 658.805527] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 658.805527] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 658.805527] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 658.824512] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449571, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.826320] env[65503]: WARNING neutronclient.v2_0.client [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
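Note: the recurring "Disabling service 'block-storage' / 'key-manager'" warnings are openstacksdk giving up on config sections it cannot process, because oslo.config raises NoSuchOptError for any group/option pair that was never registered on the ConfigOpts object it is handed. A minimal reproduction of that error class (only the option and group names are taken from the warning; nothing else about nova.conf is assumed):

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))

    # 'valid_interfaces' is deliberately left unregistered on this ConfigOpts
    # instance, so the lookup below raises NoSuchOptError -- the exception
    # named in the "Disabling service 'block-storage'" warnings above.
    try:
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        print(exc)  # "no such option valid_interfaces in group [cinder]"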
[ 658.828257] env[65503]: WARNING openstack [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 658.828257] env[65503]: WARNING openstack [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 658.857533] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Releasing lock "refresh_cache-16d508f9-72f0-4853-92fb-c8c7a37b5668" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.857802] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Received event network-vif-plugged-b9ca30d7-8bbd-483b-bc06-14bf5c43111c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 658.859016] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Acquiring lock "24e054d7-7662-47ef-8f69-4738c5ff9548-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.859016] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Lock "24e054d7-7662-47ef-8f69-4738c5ff9548-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.859016] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Lock "24e054d7-7662-47ef-8f69-4738c5ff9548-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.859016] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] No waiting events found dispatching network-vif-plugged-b9ca30d7-8bbd-483b-bc06-14bf5c43111c {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 658.859016] env[65503]: WARNING nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Received unexpected event network-vif-plugged-b9ca30d7-8bbd-483b-bc06-14bf5c43111c for instance with vm_state building and task_state spawning. 
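Note: the Acquiring / "acquired ... waited" / "released ... held" lines above are emitted by oslo.concurrency's lockutils around the per-instance event handler. A short sketch of the same pattern (lock name copied from the log; the wrapped function is a stand-in, not Nova's real handler):

    from oslo_concurrency import lockutils

    # The decorator logs 'Acquiring lock "..." by "..."' before calling the
    # wrapped function, 'Lock "..." acquired ... :: waited Ns' once it holds
    # the lock, and 'Lock "..." "released" ... :: held Ns' afterwards -- the
    # trio of DEBUG lines seen above.
    @lockutils.synchronized('24e054d7-7662-47ef-8f69-4738c5ff9548-events')
    def _pop_event():
        # critical section: pop the pending instance event under the lock
        pass

    _pop_event()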
[ 658.859305] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Received event network-changed-b9ca30d7-8bbd-483b-bc06-14bf5c43111c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 658.859305] env[65503]: DEBUG nova.compute.manager [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Refreshing instance network info cache due to event network-changed-b9ca30d7-8bbd-483b-bc06-14bf5c43111c. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 658.859305] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Acquiring lock "refresh_cache-24e054d7-7662-47ef-8f69-4738c5ff9548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.859305] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Acquired lock "refresh_cache-24e054d7-7662-47ef-8f69-4738c5ff9548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.859441] env[65503]: DEBUG nova.network.neutron [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Refreshing network info cache for port b9ca30d7-8bbd-483b-bc06-14bf5c43111c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 658.906433] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 658.939886] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449567, 'name': CloneVM_Task} progress is 93%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.947168] env[65503]: DEBUG oslo_vmware.api [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Task: {'id': task-4449568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187899} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.947903] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 658.947903] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 658.947903] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 658.948090] env[65503]: INFO nova.compute.manager [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 658.948327] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 658.948620] env[65503]: DEBUG nova.compute.manager [-] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 658.948705] env[65503]: DEBUG nova.network.neutron [-] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 658.948957] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 658.949582] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 658.949869] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 659.032482] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449569, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.104691] env[65503]: DEBUG nova.network.neutron [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Updated VIF entry in instance network info cache for port ea1023c2-b650-4073-a973-29291b753f53. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 659.104916] env[65503]: DEBUG nova.network.neutron [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Updating instance_info_cache with network_info: [{"id": "ea1023c2-b650-4073-a973-29291b753f53", "address": "fa:16:3e:dc:11:e9", "network": {"id": "f74e7fa9-c4c6-43e3-bb39-f2f843b914b5", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1457325106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c024f22a228f4d2faa4b9316ca53a1ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea1023c2-b6", "ovs_interfaceid": "ea1023c2-b650-4073-a973-29291b753f53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 659.129451] env[65503]: DEBUG nova.network.neutron [-] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 659.129451] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 659.214792] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449563, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.235354] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4c324b-79a2-40da-a55b-0cf8ac1f9e9e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.254848] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e030c29e-32f2-4762-a0e0-6994c42f2843 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.303357] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449570, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.305329] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.309526] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797b39b4-9320-48bb-86a4-c0cbef8895db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.320894] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449571, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.324946] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f46cdc3-afa3-4663-a0af-b0bf751e0186 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.348914] env[65503]: DEBUG nova.compute.provider_tree [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 659.367339] env[65503]: WARNING neutronclient.v2_0.client [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 659.367707] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 659.368026] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 659.428567] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449567, 'name': CloneVM_Task} progress is 93%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.534222] env[65503]: DEBUG oslo_vmware.api [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449569, 'name': PowerOnVM_Task, 'duration_secs': 0.521595} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.534673] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 659.534823] env[65503]: INFO nova.compute.manager [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Took 13.68 seconds to spawn the instance on the hypervisor. 
[ 659.534878] env[65503]: DEBUG nova.compute.manager [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 659.535766] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0f40b6-09b4-43f2-b680-8cbf0c8670ae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.608960] env[65503]: DEBUG oslo_concurrency.lockutils [req-b3cd8071-1525-4eaa-b294-0fb6a7548a8f req-9fe54f2d-1a96-4280-bd77-29b5da2440f3 service nova] Releasing lock "refresh_cache-9297d849-a966-48da-ba6a-453c42b99e44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.623013] env[65503]: DEBUG nova.compute.manager [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Received event network-vif-plugged-0349cd73-74a3-45ee-9582-091d2fe091f9 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 659.623140] env[65503]: DEBUG oslo_concurrency.lockutils [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Acquiring lock "9dbaff4f-ab02-481b-b51f-b134021d277c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.623485] env[65503]: DEBUG oslo_concurrency.lockutils [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Lock "9dbaff4f-ab02-481b-b51f-b134021d277c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.623485] env[65503]: DEBUG oslo_concurrency.lockutils [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Lock "9dbaff4f-ab02-481b-b51f-b134021d277c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.623665] env[65503]: DEBUG nova.compute.manager [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] No waiting events found dispatching network-vif-plugged-0349cd73-74a3-45ee-9582-091d2fe091f9 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 659.623763] env[65503]: WARNING nova.compute.manager [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Received unexpected event network-vif-plugged-0349cd73-74a3-45ee-9582-091d2fe091f9 for instance with vm_state building and task_state spawning. 
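Note: the "Checking state" entry followed by a PropertyCollector.RetrievePropertiesEx call above is the driver reading the VM's runtime.powerState after power-on. A hedged sketch of that read using oslo.vmware's vim_util helper (session is a VMwareAPISession like the placeholder one sketched earlier; vm_ref is assumed to be a VirtualMachine managed-object reference):

    from oslo_vmware import vim_util

    def get_power_state(session, vm_ref):
        """Read runtime.powerState for one VM via the PropertyCollector.

        Under the hood this issues the RetrievePropertiesEx call seen in the
        log and returns a string such as 'poweredOn' or 'poweredOff'.
        """
        return session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, vm_ref, 'runtime.powerState')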
[ 659.623910] env[65503]: DEBUG nova.compute.manager [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Received event network-changed-0349cd73-74a3-45ee-9582-091d2fe091f9 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 659.624108] env[65503]: DEBUG nova.compute.manager [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Refreshing instance network info cache due to event network-changed-0349cd73-74a3-45ee-9582-091d2fe091f9. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 659.624583] env[65503]: DEBUG oslo_concurrency.lockutils [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Acquiring lock "refresh_cache-9dbaff4f-ab02-481b-b51f-b134021d277c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.624583] env[65503]: DEBUG oslo_concurrency.lockutils [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Acquired lock "refresh_cache-9dbaff4f-ab02-481b-b51f-b134021d277c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.624583] env[65503]: DEBUG nova.network.neutron [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Refreshing network info cache for port 0349cd73-74a3-45ee-9582-091d2fe091f9 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 659.637432] env[65503]: DEBUG nova.network.neutron [-] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 659.709450] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449563, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.739154] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 659.739546] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 659.761173] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449570, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.818512] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449571, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.654596} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.818798] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 9dbaff4f-ab02-481b-b51f-b134021d277c/9dbaff4f-ab02-481b-b51f-b134021d277c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 659.819095] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 659.819435] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d1aa259-8f0e-4ed3-bbfb-a7609649ed29 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.828020] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 659.828020] env[65503]: value = "task-4449573" [ 659.828020] env[65503]: _type = "Task" [ 659.828020] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.842575] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449573, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.896046] env[65503]: DEBUG nova.network.neutron [-] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 659.900230] env[65503]: DEBUG nova.scheduler.client.report [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 36 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 659.900658] env[65503]: DEBUG nova.compute.provider_tree [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 36 to 37 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 659.900769] env[65503]: DEBUG nova.compute.provider_tree [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 659.932197] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449567, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.968490] env[65503]: WARNING neutronclient.v2_0.client [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
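Note: the "generation from 36 to 37" bump above is Placement's optimistic concurrency control at work: every inventory write carries the provider generation the writer last saw, and a stale generation is what produced the earlier 409 placement.concurrent_update error. A rough sketch of the request shape (endpoint, token, and microversion are placeholders; the inventory dict mirrors the one logged above):

    import requests

    PLACEMENT = 'http://placement.example.test'  # placeholder endpoint
    TOKEN = 'placeholder-keystone-token'
    RP_UUID = '988ff85a-1d12-41bb-a369-e298e8491ca1'

    payload = {
        # Generation this writer last saw; if another writer bumped it first,
        # Placement answers 409 placement.concurrent_update and the report
        # client refreshes the provider and retries, as the log shows.
        'resource_provider_generation': 36,
        'inventories': {
            'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                     'step_size': 1, 'allocation_ratio': 4.0},
            'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                          'max_unit': 65530, 'step_size': 1,
                          'allocation_ratio': 1.0},
            'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1,
                        'max_unit': 95, 'step_size': 1,
                        'allocation_ratio': 1.0},
        },
    }

    resp = requests.put(
        f'{PLACEMENT}/resource_providers/{RP_UUID}/inventories',
        json=payload,
        headers={'X-Auth-Token': TOKEN,
                 'OpenStack-API-Version': 'placement 1.26'})
    if resp.status_code == 409:
        # Stale generation: re-read the provider, rebuild the payload, retry.
        pass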
[ 659.969240] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 659.969780] env[65503]: WARNING openstack [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 660.054865] env[65503]: INFO nova.compute.manager [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Took 27.84 seconds to build instance. [ 660.128131] env[65503]: WARNING neutronclient.v2_0.client [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 660.128529] env[65503]: WARNING openstack [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 660.128956] env[65503]: WARNING openstack [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 660.141417] env[65503]: INFO nova.compute.manager [-] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Took 1.19 seconds to deallocate network for instance. [ 660.210985] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449563, 'name': ReconfigVM_Task, 'duration_secs': 1.63641} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.211504] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Reconfigured VM instance instance-00000017 to attach disk [datastore2] ab09cfe5-8257-462b-9ebf-87081d5793ac/ab09cfe5-8257-462b-9ebf-87081d5793ac.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 660.212579] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-168e7b00-8a03-4e6b-b711-cdec460bc2ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.221220] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for the task: (returnval){ [ 660.221220] env[65503]: value = "task-4449574" [ 660.221220] env[65503]: _type = "Task" [ 660.221220] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.233955] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449574, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.259868] env[65503]: DEBUG oslo_vmware.api [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449570, 'name': PowerOnVM_Task, 'duration_secs': 1.460202} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.260128] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 660.260324] env[65503]: INFO nova.compute.manager [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Took 11.65 seconds to spawn the instance on the hypervisor. 
[ 660.260524] env[65503]: DEBUG nova.compute.manager [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 660.261397] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976e0502-134a-4735-aa82-38717fadd75f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.342833] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449573, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069258} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.343184] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 660.346881] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2cce294-73b7-4ee6-a59d-a5bd994e1862 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.372994] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] 9dbaff4f-ab02-481b-b51f-b134021d277c/9dbaff4f-ab02-481b-b51f-b134021d277c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 660.375371] env[65503]: DEBUG nova.network.neutron [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Updated VIF entry in instance network info cache for port b9ca30d7-8bbd-483b-bc06-14bf5c43111c. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 660.375756] env[65503]: DEBUG nova.network.neutron [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Updating instance_info_cache with network_info: [{"id": "b9ca30d7-8bbd-483b-bc06-14bf5c43111c", "address": "fa:16:3e:01:91:2d", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.213", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9ca30d7-8b", "ovs_interfaceid": "b9ca30d7-8bbd-483b-bc06-14bf5c43111c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 660.377206] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-387575fc-a4a0-44ba-9b81-8fa2a3ba32ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.400835] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 660.400835] env[65503]: value = "task-4449575" [ 660.400835] env[65503]: _type = "Task" [ 660.400835] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.406498] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.692s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.407138] env[65503]: DEBUG nova.compute.manager [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 660.413561] env[65503]: INFO nova.compute.manager [-] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Took 1.61 seconds to deallocate network for instance. 
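The instance_info_cache update above carries the full network_info entry for port b9ca30d7-8bbd-483b-bc06-14bf5c43111c (fixed IP, MTU, OVS interface details). Below is a small sketch of pulling the commonly used fields out of a dict shaped like that entry; the keys are copied from the log line, and the trimmed structure is for illustration only, not a schema guarantee.

```python
# One network_info entry, trimmed from the cache update logged above.
vif = {
    "id": "b9ca30d7-8bbd-483b-bc06-14bf5c43111c",
    "address": "fa:16:3e:01:91:2d",
    "devname": "tapb9ca30d7-8b",
    "type": "ovs",
    "details": {"segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}},
    "network": {
        "label": "shared",
        "meta": {"mtu": 8950},
        "subnets": [
            {"cidr": "192.168.233.0/24",
             "ips": [{"address": "192.168.233.213", "type": "fixed"}]},
        ],
    },
}

# Collect the fixed IPs and a few fields the compute logs usually reference.
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]
print(vif["id"], vif["devname"], fixed_ips, vif["network"]["meta"]["mtu"])
# -> b9ca30d7-... tapb9ca30d7-8b ['192.168.233.213'] 8950
```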
[ 660.417134] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.561s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.417134] env[65503]: INFO nova.compute.claims [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.418522] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449575, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.433678] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449567, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.529286] env[65503]: WARNING openstack [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 660.530071] env[65503]: WARNING openstack [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 660.557276] env[65503]: DEBUG oslo_concurrency.lockutils [None req-583b3457-288d-4ca1-9724-7beca1861ae4 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Lock "16d508f9-72f0-4853-92fb-c8c7a37b5668" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.373s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.639701] env[65503]: WARNING neutronclient.v2_0.client [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
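The neutronclient warning above points at OpenstackSDK as the replacement for the deprecated Python bindings. A hedged sketch of the SDK equivalent for fetching one of the ports seen in this log follows; the cloud name "devstack" assumes a matching clouds.yaml entry and is a placeholder, as is the port ID.

```python
# Hedged sketch of the OpenstackSDK path the deprecation warning points to.
# "devstack" assumes a clouds.yaml entry of that name; the port ID is taken
# from the log above purely as an example value.
import openstack

conn = openstack.connect(cloud="devstack")
port = conn.network.get_port("b9ca30d7-8bbd-483b-bc06-14bf5c43111c")
print(port.id, port.status,
      [fip["ip_address"] for fip in port.fixed_ips])
```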
[ 660.645766] env[65503]: WARNING openstack [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 660.645766] env[65503]: WARNING openstack [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 660.658259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.735727] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449574, 'name': Rename_Task, 'duration_secs': 0.373232} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.736012] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 660.737785] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fcf6f034-0011-47d0-8941-fbe57d9e9212 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.745651] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for the task: (returnval){ [ 660.745651] env[65503]: value = "task-4449576" [ 660.745651] env[65503]: _type = "Task" [ 660.745651] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.757504] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449576, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.782994] env[65503]: INFO nova.compute.manager [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Took 27.49 seconds to build instance. 
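The recurring "Disabling service" warnings above wrap an oslo_config.cfg.NoSuchOptError: an option is looked up in a group where it was never registered. The snippet below only reproduces that error type with oslo.config to show where the message comes from; it makes no claim about how Nova or openstacksdk actually register their [cinder] and [barbican] options.

```python
# Minimal reproduction of the error class seen in the warnings above:
# accessing an option that was never registered in a group raises
# oslo_config.cfg.NoSuchOptError with the same wording as the log.
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup("cinder"))
conf([])  # parse an empty argv so the config object is initialized

try:
    _ = conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print(exc)  # no such option valid_interfaces in group [cinder]
```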
[ 660.894521] env[65503]: DEBUG oslo_concurrency.lockutils [req-0c31468d-beaf-48b1-bac8-2cd0fe597564 req-b14d8cb4-0456-418a-bbc3-95aadb4939dd service nova] Releasing lock "refresh_cache-24e054d7-7662-47ef-8f69-4738c5ff9548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.916968] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449575, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.920973] env[65503]: DEBUG nova.compute.utils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 660.927136] env[65503]: DEBUG nova.compute.manager [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 660.927370] env[65503]: DEBUG nova.network.neutron [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 660.928198] env[65503]: WARNING neutronclient.v2_0.client [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 660.928596] env[65503]: WARNING neutronclient.v2_0.client [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 660.932894] env[65503]: WARNING openstack [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 660.933445] env[65503]: WARNING openstack [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 660.942434] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.959412] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449567, 'name': CloneVM_Task} progress is 95%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.091580] env[65503]: DEBUG nova.network.neutron [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Updated VIF entry in instance network info cache for port 0349cd73-74a3-45ee-9582-091d2fe091f9. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 661.093276] env[65503]: DEBUG nova.network.neutron [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Updating instance_info_cache with network_info: [{"id": "0349cd73-74a3-45ee-9582-091d2fe091f9", "address": "fa:16:3e:df:18:52", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0349cd73-74", "ovs_interfaceid": "0349cd73-74a3-45ee-9582-091d2fe091f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 661.256835] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449576, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.261607] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.261768] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.287565] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f4985254-a42e-4e90-bdcf-4f64666e1eef tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "24e054d7-7662-47ef-8f69-4738c5ff9548" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.008s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.415617] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449575, 'name': ReconfigVM_Task, 'duration_secs': 0.715209} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.416573] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Reconfigured VM instance instance-00000016 to attach disk [datastore2] 9dbaff4f-ab02-481b-b51f-b134021d277c/9dbaff4f-ab02-481b-b51f-b134021d277c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 661.417038] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d07d2d8-eb82-4e3a-8720-3aab29be45c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.427237] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 661.427237] env[65503]: value = "task-4449577" [ 661.427237] env[65503]: _type = "Task" [ 661.427237] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.430332] env[65503]: DEBUG nova.compute.manager [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 661.441904] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449577, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.457191] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449567, 'name': CloneVM_Task, 'duration_secs': 2.667319} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.457191] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Created linked-clone VM from snapshot [ 661.458828] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1ea0da-94e6-4277-a928-a7bbcca8e563 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.470351] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Uploading image 8257b445-480c-402b-8269-1eadd665a26f {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 661.525191] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 661.525191] env[65503]: value = "vm-870262" [ 661.525191] env[65503]: _type = "VirtualMachine" [ 661.525191] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 661.527176] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-807b1ddf-4641-4c23-9318-3e04761b1f70 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.536786] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lease: (returnval){ [ 661.536786] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520d4e0b-d703-797c-8704-b8108978dd13" [ 661.536786] env[65503]: _type = "HttpNfcLease" [ 661.536786] env[65503]: } obtained for exporting VM: (result){ [ 661.536786] env[65503]: value = "vm-870262" [ 661.536786] env[65503]: _type = "VirtualMachine" [ 661.536786] env[65503]: }. 
{{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 661.537247] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the lease: (returnval){ [ 661.537247] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520d4e0b-d703-797c-8704-b8108978dd13" [ 661.537247] env[65503]: _type = "HttpNfcLease" [ 661.537247] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 661.551423] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 661.551423] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520d4e0b-d703-797c-8704-b8108978dd13" [ 661.551423] env[65503]: _type = "HttpNfcLease" [ 661.551423] env[65503]: } is initializing. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 661.597377] env[65503]: DEBUG oslo_concurrency.lockutils [req-a481bd68-bfcd-4012-8ecb-07e52131bcf9 req-fbe86e23-262c-4c4e-b4b7-498b33e1068a service nova] Releasing lock "refresh_cache-9dbaff4f-ab02-481b-b51f-b134021d277c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.761163] env[65503]: DEBUG oslo_vmware.api [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449576, 'name': PowerOnVM_Task, 'duration_secs': 0.87469} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.761486] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 661.761765] env[65503]: INFO nova.compute.manager [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Took 7.82 seconds to spawn the instance on the hypervisor. [ 661.761985] env[65503]: DEBUG nova.compute.manager [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 661.762967] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32baefb4-e746-412b-8fc8-549e9a3ebdb2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.769010] env[65503]: DEBUG nova.compute.manager [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 661.947507] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449577, 'name': Rename_Task, 'duration_secs': 0.22572} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.948153] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 661.948153] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58efcd77-6995-40dd-90e8-7ca91672ebdc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.956123] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 661.956123] env[65503]: value = "task-4449580" [ 661.956123] env[65503]: _type = "Task" [ 661.956123] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.970925] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449580, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.009771] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97855a0b-1490-4082-a7e2-22d2d596072b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.019437] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4853c2c7-8df3-44da-9533-da606892ca61 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.061780] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c0316f-2b6e-4ac3-a27c-f2a95198cb06 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.069957] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 662.069957] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520d4e0b-d703-797c-8704-b8108978dd13" [ 662.069957] env[65503]: _type = "HttpNfcLease" [ 662.069957] env[65503]: } is ready. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 662.072779] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 662.072779] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520d4e0b-d703-797c-8704-b8108978dd13" [ 662.072779] env[65503]: _type = "HttpNfcLease" [ 662.072779] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 662.074152] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9cfba8-0891-497b-bba3-f7dd05836454 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.077495] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3517389f-3554-41f9-aac3-e1410e325e3e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.088767] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522b1728-2ed5-9699-1a38-aa5630fd3f7a/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 662.089145] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522b1728-2ed5-9699-1a38-aa5630fd3f7a/disk-0.vmdk for reading. {{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 662.100086] env[65503]: DEBUG nova.compute.provider_tree [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.355403] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7cdb04e3-e184-42a6-9446-d56e17ecedb6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.355403] env[65503]: INFO nova.compute.manager [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Took 26.76 seconds to build instance. 
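Once the HttpNfcLease above is ready, the export proceeds by reading the disk-0.vmdk URL taken from the lease info. Below is a minimal stdlib sketch of streaming from such a URL; the URL is copied from the log as a placeholder, and a real streamOptimized export also has to send periodic HttpNfcLeaseProgress updates and complete the lease, which this sketch deliberately omits.

```python
# Stream the exported disk from the VMDK URL found in the lease info above.
# The URL is a placeholder copied from the log; lease keep-alive/progress
# handling is intentionally left out of this sketch.
import shutil
import urllib.request

VMDK_URL = ("https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/"
            "522b1728-2ed5-9699-1a38-aa5630fd3f7a/disk-0.vmdk")

with urllib.request.urlopen(VMDK_URL) as resp, open("disk-0.vmdk", "wb") as out:
    shutil.copyfileobj(resp, out, length=64 * 1024)  # copy in 64 KiB chunks
```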
[ 662.355403] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.355403] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.355403] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.413033] env[65503]: DEBUG nova.policy [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4d889f5b4554fdf887ad0e27a7370aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6797718310754721b81c8d30acfdacd5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 662.444792] env[65503]: DEBUG nova.compute.manager [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 662.478687] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449580, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.486308] env[65503]: DEBUG nova.virt.hardware [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 662.486778] env[65503]: DEBUG nova.virt.hardware [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.488138] env[65503]: DEBUG nova.virt.hardware [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 662.488138] env[65503]: DEBUG nova.virt.hardware [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 662.488138] env[65503]: DEBUG nova.virt.hardware [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 662.488138] env[65503]: DEBUG nova.virt.hardware [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 662.488358] env[65503]: DEBUG nova.virt.hardware [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.488549] env[65503]: DEBUG nova.virt.hardware [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 662.488804] env[65503]: DEBUG nova.virt.hardware [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 662.489039] env[65503]: DEBUG nova.virt.hardware [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 662.489290] env[65503]: DEBUG nova.virt.hardware [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 662.490353] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5defa2e9-e71c-4a85-9ad4-b6919adcbb53 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.500283] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11424cd-bc9a-4dfb-8709-52261a8b43dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.524770] env[65503]: DEBUG nova.compute.manager [req-60ff3bd8-b6bf-4db4-bb7a-6581c59f07e6 req-84f43db3-857b-45ab-88ab-bff70ce47c28 service nova] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Received event network-vif-deleted-831f4b30-3858-4674-b6f1-f7f00241ca20 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 662.603270] env[65503]: DEBUG nova.scheduler.client.report [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 662.793763] env[65503]: DEBUG oslo_concurrency.lockutils [None req-67887dad-6e0a-431c-805e-297be116efea tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Lock "ab09cfe5-8257-462b-9ebf-87081d5793ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.271s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.817269] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquiring lock "85d0ed1d-6306-4999-832b-f4e69233fec7" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.817920] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "85d0ed1d-6306-4999-832b-f4e69233fec7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.818813] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquiring lock "85d0ed1d-6306-4999-832b-f4e69233fec7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.819034] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "85d0ed1d-6306-4999-832b-f4e69233fec7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.819203] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "85d0ed1d-6306-4999-832b-f4e69233fec7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.824685] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.826661] env[65503]: INFO nova.compute.manager [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Terminating instance [ 662.831664] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.841333] env[65503]: DEBUG nova.network.neutron [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Successfully created port: f303ecf7-0607-45a0-bad8-c2eb7e30b62c {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 662.844346] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.846134] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.846134] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.846134] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.846134] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 662.846391] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.977132] env[65503]: DEBUG oslo_vmware.api [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449580, 'name': PowerOnVM_Task, 'duration_secs': 0.703212} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.978237] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 662.978429] env[65503]: INFO nova.compute.manager [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Took 11.63 seconds to spawn the instance on the hypervisor. 
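The nova.virt.hardware trace a few entries above (1 vCPU, limits 65536:65536:65536, result VirtCPUTopology(cores=1,sockets=1,threads=1)) enumerates every topology whose product matches the vCPU count and then sorts by preference. The sketch below mirrors that enumeration idea only; it is not the nova.virt.hardware code.

```python
# Sketch of the "possible topologies" idea from the hardware.py trace above:
# list every (sockets, cores, threads) triple whose product equals the vCPU
# count and respects the per-dimension limits.
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)

print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log above
```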
[ 662.978635] env[65503]: DEBUG nova.compute.manager [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 662.979577] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fcc3307-0322-4a4f-ab8c-cdd3c53eebed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.109143] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.695s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.110260] env[65503]: DEBUG nova.compute.manager [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 663.113312] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.387s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.117128] env[65503]: INFO nova.compute.claims [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 663.343386] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquiring lock "d8d917c1-224c-4773-a911-d09f3f719e1b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.343656] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Lock "d8d917c1-224c-4773-a911-d09f3f719e1b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.345181] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquiring lock "d8d917c1-224c-4773-a911-d09f3f719e1b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.345181] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Lock "d8d917c1-224c-4773-a911-d09f3f719e1b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.345528] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Lock "d8d917c1-224c-4773-a911-d09f3f719e1b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.349654] env[65503]: DEBUG nova.compute.manager [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 663.349654] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 663.354786] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.355967] env[65503]: INFO nova.compute.manager [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Terminating instance [ 663.360084] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a049bc-ddbf-4046-a0be-19bbdce968fe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.382889] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 663.384425] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d8c510b-5aad-4184-bddc-0409e3ac6dac {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.396039] env[65503]: DEBUG oslo_vmware.api [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for the task: (returnval){ [ 663.396039] 
env[65503]: value = "task-4449581" [ 663.396039] env[65503]: _type = "Task" [ 663.396039] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.412523] env[65503]: DEBUG oslo_vmware.api [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449581, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.515234] env[65503]: INFO nova.compute.manager [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Took 29.62 seconds to build instance. [ 663.623943] env[65503]: DEBUG nova.compute.utils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 663.626849] env[65503]: DEBUG nova.compute.manager [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 663.627523] env[65503]: DEBUG nova.network.neutron [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 663.628237] env[65503]: WARNING neutronclient.v2_0.client [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 663.628809] env[65503]: WARNING neutronclient.v2_0.client [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 663.629658] env[65503]: WARNING openstack [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 663.631024] env[65503]: WARNING openstack [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 663.874414] env[65503]: DEBUG nova.compute.manager [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 663.874820] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 663.875865] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b026f6-fbe6-45a6-af60-a4d9fa57ad5c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.887376] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 663.888630] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4d94a71-be98-4af7-af06-2e8f72df2faa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.897699] env[65503]: DEBUG oslo_vmware.api [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for the task: (returnval){ [ 663.897699] env[65503]: value = "task-4449582" [ 663.897699] env[65503]: _type = "Task" [ 663.897699] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.912245] env[65503]: DEBUG oslo_vmware.api [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449581, 'name': PowerOffVM_Task, 'duration_secs': 0.422021} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.917076] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 663.917520] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 663.917729] env[65503]: DEBUG oslo_vmware.api [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449582, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.918159] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-645ff622-274d-4ed0-94f0-07e407b35f23 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.993286] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 663.993610] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 663.993874] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Deleting the datastore file [datastore1] 85d0ed1d-6306-4999-832b-f4e69233fec7 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 663.995467] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2daca41b-bc0d-492d-9c94-c9ead83a029f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.006631] env[65503]: DEBUG oslo_vmware.api [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for the task: (returnval){ [ 664.006631] env[65503]: value = "task-4449584" [ 664.006631] env[65503]: _type = "Task" [ 664.006631] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.019969] env[65503]: DEBUG oslo_concurrency.lockutils [None req-265ac6d3-047d-4747-aa14-e93de6a45c8a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "9dbaff4f-ab02-481b-b51f-b134021d277c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.138s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.021777] env[65503]: DEBUG oslo_vmware.api [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.056522] env[65503]: DEBUG nova.policy [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c20f373f78a5482299298fe4e8d6b514', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eecbcfbbb0904f57939c23ef03418bd5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 664.143382] env[65503]: DEBUG nova.compute.manager [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 664.422781] env[65503]: DEBUG oslo_vmware.api [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449582, 'name': PowerOffVM_Task, 'duration_secs': 0.269178} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.423121] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 664.424977] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 664.424977] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7c98c52-6f7a-4143-a007-c651d575b30f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.497993] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 664.500896] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 664.501465] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Deleting the datastore file [datastore1] d8d917c1-224c-4773-a911-d09f3f719e1b {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 664.502340] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf670092-35d3-4887-b714-ab1dc67f4b86 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.521394] env[65503]: DEBUG oslo_vmware.api [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for the task: (returnval){ [ 664.521394] env[65503]: value = "task-4449586" [ 664.521394] env[65503]: _type = "Task" [ 664.521394] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.530148] env[65503]: DEBUG oslo_vmware.api [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Task: {'id': task-4449584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226359} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.530148] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 664.530148] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 664.530148] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 664.530148] env[65503]: INFO nova.compute.manager [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Took 1.18 seconds to destroy the instance on the hypervisor. [ 664.530853] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 664.530853] env[65503]: DEBUG nova.compute.manager [-] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 664.530853] env[65503]: DEBUG nova.network.neutron [-] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 664.530853] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 664.532383] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 664.532383] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 664.541849] env[65503]: DEBUG oslo_vmware.api [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449586, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.595461] env[65503]: DEBUG nova.network.neutron [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Successfully created port: ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 664.683255] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a355f66-0f77-4e87-9f69-121b0130b92e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.693475] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de53ff0-4119-4831-8ae4-d3697042ece3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.733958] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 664.740124] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fae4471-58c1-4297-8d6d-683411e8ab87 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.746160] env[65503]: DEBUG nova.network.neutron [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Successfully updated port: f303ecf7-0607-45a0-bad8-c2eb7e30b62c {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 664.751746] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b7bbbf-425b-4577-965f-f00f559094e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.773123] env[65503]: DEBUG nova.compute.provider_tree [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 665.037030] env[65503]: DEBUG oslo_vmware.api [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Task: {'id': task-4449586, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17454} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.041948] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 665.041948] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 665.041948] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 665.041948] env[65503]: INFO nova.compute.manager [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 665.041948] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 665.042383] env[65503]: DEBUG nova.compute.manager [-] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 665.042383] env[65503]: DEBUG nova.network.neutron [-] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 665.042383] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
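Annotation: the "Waiting for function ... _deallocate_network_with_retries to return" records come from an oslo.service looping call — the Neutron deallocation is wrapped in a function that is re-invoked on an interval until it signals completion, so transient failures are retried rather than aborting the teardown. A rough sketch of that looping-call idiom; the inner function and its arguments are stand-ins, not Nova's actual implementation:

from oslo_service import loopingcall

def deallocate_with_retries(deallocate, context, instance):
    """Retry `deallocate` via a looping call (illustrative placeholder names)."""

    def _attempt():
        try:
            deallocate(context, instance)
        except Exception:
            # Returning normally lets the looping call invoke us again
            # after the configured interval.
            return
        # Success: LoopingCallDone stops the loop and unblocks wait().
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_attempt)
    # start() returns an event; wait() blocks until LoopingCallDone is raised.
    timer.start(interval=30, initial_delay=0).wait()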
[ 665.042646] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 665.042935] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 665.165175] env[65503]: DEBUG nova.compute.manager [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 665.196038] env[65503]: DEBUG nova.virt.hardware [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 665.196236] env[65503]: DEBUG nova.virt.hardware [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 665.196327] env[65503]: DEBUG nova.virt.hardware [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 665.196514] env[65503]: DEBUG nova.virt.hardware [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 665.196657] env[65503]: DEBUG nova.virt.hardware [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 665.196822] env[65503]: DEBUG nova.virt.hardware [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 665.197072] env[65503]: DEBUG nova.virt.hardware [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 665.197244] env[65503]: DEBUG nova.virt.hardware [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 665.197409] env[65503]: DEBUG nova.virt.hardware [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 665.197566] env[65503]: DEBUG nova.virt.hardware [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 665.197731] env[65503]: DEBUG nova.virt.hardware [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 665.198749] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41908776-f403-4633-9995-96e3ed17041e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.209422] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0343419a-c1f7-44fd-9ede-362db87dd811 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.251814] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "refresh_cache-8c274097-234a-44be-9159-c2fb0f1a8da1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.252134] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquired lock "refresh_cache-8c274097-234a-44be-9159-c2fb0f1a8da1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.252428] env[65503]: DEBUG nova.network.neutron [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Building network info 
cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 665.297796] env[65503]: ERROR nova.scheduler.client.report [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [req-35f5ec40-bd8b-4f94-8803-c281d2bfb044] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-35f5ec40-bd8b-4f94-8803-c281d2bfb044"}]} [ 665.317026] env[65503]: DEBUG nova.scheduler.client.report [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 665.339504] env[65503]: DEBUG nova.scheduler.client.report [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 665.339504] env[65503]: DEBUG nova.compute.provider_tree [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 665.355767] env[65503]: DEBUG nova.scheduler.client.report [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 665.377354] env[65503]: DEBUG nova.scheduler.client.report [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Refreshing 
trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 665.669904] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 665.756241] env[65503]: WARNING openstack [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 665.756241] env[65503]: WARNING openstack [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 665.792253] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a87776-120a-4c91-a085-39bcd883bcff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.803207] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a32c471-9a4a-493d-8135-93228c8ab78d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.835739] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f049241-6b91-4267-bedb-f9c55a339569 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.844301] env[65503]: DEBUG nova.network.neutron [-] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 665.846703] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83beaed4-8738-4cd0-b17f-3dc273d21572 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.865097] env[65503]: DEBUG nova.compute.provider_tree [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 666.354021] 
env[65503]: INFO nova.compute.manager [-] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Took 1.82 seconds to deallocate network for instance. [ 666.406577] env[65503]: DEBUG nova.scheduler.client.report [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 38 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 666.406861] env[65503]: DEBUG nova.compute.provider_tree [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 38 to 39 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 666.407092] env[65503]: DEBUG nova.compute.provider_tree [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 666.412214] env[65503]: DEBUG nova.network.neutron [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Successfully updated port: ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 666.645670] env[65503]: DEBUG nova.network.neutron [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 666.725412] env[65503]: WARNING openstack [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 666.727255] env[65503]: WARNING openstack [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 666.852283] env[65503]: WARNING neutronclient.v2_0.client [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 666.853048] env[65503]: WARNING openstack [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 666.853457] env[65503]: WARNING openstack [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 666.863237] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.915523] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquiring lock "refresh_cache-f7b81948-c480-47a4-9d0f-5c2c163bd7f2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.915807] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquired lock "refresh_cache-f7b81948-c480-47a4-9d0f-5c2c163bd7f2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.915996] env[65503]: DEBUG 
nova.network.neutron [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 666.918593] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.805s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.919299] env[65503]: DEBUG nova.compute.manager [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 666.933978] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.812s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.934292] env[65503]: DEBUG nova.objects.instance [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 667.013538] env[65503]: DEBUG nova.network.neutron [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Updating instance_info_cache with network_info: [{"id": "f303ecf7-0607-45a0-bad8-c2eb7e30b62c", "address": "fa:16:3e:d7:46:b0", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf303ecf7-06", "ovs_interfaceid": "f303ecf7-0607-45a0-bad8-c2eb7e30b62c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 667.116782] env[65503]: DEBUG 
nova.network.neutron [-] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 667.428620] env[65503]: DEBUG nova.compute.utils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 667.430144] env[65503]: WARNING openstack [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 667.430144] env[65503]: WARNING openstack [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 667.441917] env[65503]: DEBUG nova.compute.manager [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 667.442373] env[65503]: DEBUG nova.network.neutron [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 667.442669] env[65503]: WARNING neutronclient.v2_0.client [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 667.442754] env[65503]: WARNING neutronclient.v2_0.client [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
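Annotation: a few records back, the PUT of the provider inventory failed with 409 "placement.concurrent_update" and was retried after a refresh. Every resource provider carries a generation number; each write must echo the generation the client last read, and a mismatch means another writer got there first, so the report client re-reads the inventory and re-sends. The log then shows the generation advancing from 38 to 39 once the retry lands. A hedged sketch of that optimistic-concurrency loop against the Placement HTTP API, where the endpoint, token, and inventory payload are placeholders:

import requests

PLACEMENT = 'http://placement.example.test/placement'   # placeholder endpoint
HEADERS = {'X-Auth-Token': 'TOKEN',                      # placeholder token
           'OpenStack-API-Version': 'placement 1.26'}

def set_inventories(rp_uuid, inventories, max_retries=4):
    """PUT inventories, retrying while the provider generation keeps moving."""
    url = f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories'
    for _ in range(max_retries):
        # Re-read the provider's current generation before each attempt.
        current = requests.get(url, headers=HEADERS).json()
        body = {'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': inventories}
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation; loop around, refresh, and try again.
    raise RuntimeError('inventory update kept conflicting; giving up')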
[ 667.443368] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 667.443705] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 667.517789] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Releasing lock "refresh_cache-8c274097-234a-44be-9159-c2fb0f1a8da1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.519256] env[65503]: DEBUG nova.compute.manager [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Instance network_info: |[{"id": "f303ecf7-0607-45a0-bad8-c2eb7e30b62c", "address": "fa:16:3e:d7:46:b0", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf303ecf7-06", "ovs_interfaceid": "f303ecf7-0607-45a0-bad8-c2eb7e30b62c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 667.519924] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:46:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f303ecf7-0607-45a0-bad8-c2eb7e30b62c', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.537937] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall 
[None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 667.538797] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.539278] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-824923e6-dc2f-4be2-a90d-4b4e1752d399 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.568406] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.568406] env[65503]: value = "task-4449587" [ 667.568406] env[65503]: _type = "Task" [ 667.568406] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.587617] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449587, 'name': CreateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.600214] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquiring lock "8f0bf665-b21b-42ed-816d-69dee2f40654" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.602020] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Lock "8f0bf665-b21b-42ed-816d-69dee2f40654" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.621588] env[65503]: DEBUG nova.network.neutron [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 667.624939] env[65503]: INFO nova.compute.manager [-] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Took 2.58 seconds to deallocate network for instance. 
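Annotation: nearly every step above is bracketed by oslo_concurrency lock records — a named lock (an instance UUID, "compute_resources", a "refresh_cache-..." key) is acquired on entry, and the log reports how long the caller waited for it and how long it was held. The idiom behind those records is simply a named lock used as a context manager; a small sketch with the lock names taken from the log but the guarded work reduced to placeholders:

from oslo_concurrency import lockutils

def claim_resources(tracker, instance):
    """Serialize resource claims the way the ResourceTracker records show."""
    # Everything inside the block runs under the named lock; the
    # "waited"/"held" figures in the log are measured around this region.
    with lockutils.lock('compute_resources'):
        tracker.instance_claim(instance)   # placeholder for the real work

def build_instance(instance_uuid, do_build):
    """Per-instance serialization keyed on the instance UUID (illustrative)."""
    with lockutils.lock(instance_uuid):
        do_build()                         # placeholder for the real work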
[ 667.681757] env[65503]: DEBUG nova.policy [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '007d95ce82f34492a4cd26bdf1df313a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae68676f87fd4edc802b2e4b4917ceec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 667.809285] env[65503]: WARNING openstack [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 667.809859] env[65503]: WARNING openstack [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 667.856907] env[65503]: DEBUG nova.compute.manager [req-37f745af-9adb-4870-ab06-c4a6f9981064 req-a176310b-b237-4459-8574-05fd9dbd7d45 service nova] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Received event network-vif-plugged-f303ecf7-0607-45a0-bad8-c2eb7e30b62c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 667.857373] env[65503]: DEBUG oslo_concurrency.lockutils [req-37f745af-9adb-4870-ab06-c4a6f9981064 req-a176310b-b237-4459-8574-05fd9dbd7d45 service nova] Acquiring lock "8c274097-234a-44be-9159-c2fb0f1a8da1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.857764] env[65503]: DEBUG oslo_concurrency.lockutils [req-37f745af-9adb-4870-ab06-c4a6f9981064 req-a176310b-b237-4459-8574-05fd9dbd7d45 service nova] Lock "8c274097-234a-44be-9159-c2fb0f1a8da1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.860279] env[65503]: DEBUG oslo_concurrency.lockutils [req-37f745af-9adb-4870-ab06-c4a6f9981064 req-a176310b-b237-4459-8574-05fd9dbd7d45 service nova] Lock "8c274097-234a-44be-9159-c2fb0f1a8da1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.860279] env[65503]: DEBUG nova.compute.manager [req-37f745af-9adb-4870-ab06-c4a6f9981064 req-a176310b-b237-4459-8574-05fd9dbd7d45 service nova] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] No waiting events found dispatching network-vif-plugged-f303ecf7-0607-45a0-bad8-c2eb7e30b62c 
{{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 667.860279] env[65503]: WARNING nova.compute.manager [req-37f745af-9adb-4870-ab06-c4a6f9981064 req-a176310b-b237-4459-8574-05fd9dbd7d45 service nova] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Received unexpected event network-vif-plugged-f303ecf7-0607-45a0-bad8-c2eb7e30b62c for instance with vm_state building and task_state spawning. [ 667.943023] env[65503]: DEBUG nova.compute.manager [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 667.955833] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3d8c87cc-ea38-4c11-b36f-0b481c42c83c tempest-ServersAdmin275Test-404667626 tempest-ServersAdmin275Test-404667626-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.961022] env[65503]: WARNING neutronclient.v2_0.client [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 667.962051] env[65503]: WARNING openstack [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 667.962572] env[65503]: WARNING openstack [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 667.976838] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.242s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.979254] env[65503]: INFO nova.compute.claims [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 668.081088] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449587, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.082911] env[65503]: DEBUG nova.compute.manager [None req-1b8e97aa-5cb3-4636-9576-b271c8d063bd tempest-ServerDiagnosticsV248Test-230372599 tempest-ServerDiagnosticsV248Test-230372599-project-admin] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 668.084692] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1308ddb-9e95-4f74-bc68-f545f9a251d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.095199] env[65503]: INFO nova.compute.manager [None req-1b8e97aa-5cb3-4636-9576-b271c8d063bd tempest-ServerDiagnosticsV248Test-230372599 tempest-ServerDiagnosticsV248Test-230372599-project-admin] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Retrieving diagnostics [ 668.096507] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801b2336-3599-436f-bc11-45e56aad39f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.105862] env[65503]: DEBUG nova.compute.manager [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 668.145923] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.231536] env[65503]: DEBUG nova.network.neutron [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Updating instance_info_cache with network_info: [{"id": "ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a", "address": "fa:16:3e:b6:e2:b5", "network": {"id": "1f384090-e7f5-4516-8faf-3069a073e0c8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-745014278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eecbcfbbb0904f57939c23ef03418bd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab13f109-bf", "ovs_interfaceid": "ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 668.280580] env[65503]: DEBUG nova.network.neutron [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Successfully created port: f78a3ed0-4f14-45aa-afd6-01ab10698376 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 668.587845] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449587, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.635575] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.711391] env[65503]: DEBUG nova.network.neutron [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Successfully created port: 3c1610d7-879b-45f7-9a0e-9f341fd61000 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 668.736222] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Releasing lock "refresh_cache-f7b81948-c480-47a4-9d0f-5c2c163bd7f2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.736671] env[65503]: DEBUG nova.compute.manager [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Instance network_info: |[{"id": "ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a", "address": "fa:16:3e:b6:e2:b5", "network": {"id": "1f384090-e7f5-4516-8faf-3069a073e0c8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-745014278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eecbcfbbb0904f57939c23ef03418bd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab13f109-bf", "ovs_interfaceid": "ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 668.737163] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None 
req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:e2:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0721b358-3768-472d-95f8-6d6755ab1635', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 668.747123] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Creating folder: Project (eecbcfbbb0904f57939c23ef03418bd5). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.747123] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0c3fbbd-5ff2-46c6-9547-4ea3dcf69ad6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.762268] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Created folder: Project (eecbcfbbb0904f57939c23ef03418bd5) in parent group-v870190. [ 668.762472] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Creating folder: Instances. Parent ref: group-v870264. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.762731] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62f6ec26-afde-4b55-9634-f2897bb9c93b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.774607] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Created folder: Instances in parent group-v870264. [ 668.774860] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 668.778326] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 668.778632] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-211f7b03-908e-44e2-92f3-e755dc923c0c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.806875] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 668.806875] env[65503]: value = "task-4449590" [ 668.806875] env[65503]: _type = "Task" [ 668.806875] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.816923] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449590, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.954399] env[65503]: DEBUG nova.compute.manager [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 668.986440] env[65503]: DEBUG nova.virt.hardware [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 668.990022] env[65503]: DEBUG nova.virt.hardware [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 668.990022] env[65503]: DEBUG nova.virt.hardware [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 668.990022] env[65503]: DEBUG nova.virt.hardware [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 668.990022] env[65503]: DEBUG nova.virt.hardware [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 668.990022] env[65503]: DEBUG nova.virt.hardware [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 668.990022] env[65503]: DEBUG nova.virt.hardware [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 668.990828] env[65503]: DEBUG nova.virt.hardware [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 668.990828] env[65503]: DEBUG nova.virt.hardware [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 668.990828] env[65503]: DEBUG nova.virt.hardware [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 668.990828] env[65503]: DEBUG nova.virt.hardware [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 668.995543] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e40f05-9986-4076-a3f8-dc6ca14767a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.007732] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3826494-3752-49a1-b8f1-a22b0a95baba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.082667] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449587, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.087425] env[65503]: DEBUG nova.network.neutron [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Successfully created port: ca808092-6ae6-418d-9074-0f8e4b10289a {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 669.209487] env[65503]: DEBUG oslo_concurrency.lockutils [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Acquiring lock "38e9a714-87f8-422c-9cc5-09b6aec76198" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.209784] env[65503]: DEBUG oslo_concurrency.lockutils [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Lock "38e9a714-87f8-422c-9cc5-09b6aec76198" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.318017] env[65503]: DEBUG nova.compute.manager [req-d5dcbb92-44a4-4396-9b39-f9618be75458 req-87d051c2-388b-4133-b96a-b8bb3c73b79b service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Received event network-vif-plugged-ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 669.319319] env[65503]: DEBUG oslo_concurrency.lockutils [req-d5dcbb92-44a4-4396-9b39-f9618be75458 req-87d051c2-388b-4133-b96a-b8bb3c73b79b service nova] Acquiring lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.319319] env[65503]: DEBUG oslo_concurrency.lockutils [req-d5dcbb92-44a4-4396-9b39-f9618be75458 req-87d051c2-388b-4133-b96a-b8bb3c73b79b service nova] Lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.319319] env[65503]: DEBUG oslo_concurrency.lockutils [req-d5dcbb92-44a4-4396-9b39-f9618be75458 req-87d051c2-388b-4133-b96a-b8bb3c73b79b service nova] Lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.319319] env[65503]: DEBUG nova.compute.manager [req-d5dcbb92-44a4-4396-9b39-f9618be75458 req-87d051c2-388b-4133-b96a-b8bb3c73b79b service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] No waiting events found dispatching network-vif-plugged-ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 669.319319] env[65503]: WARNING nova.compute.manager [req-d5dcbb92-44a4-4396-9b39-f9618be75458 req-87d051c2-388b-4133-b96a-b8bb3c73b79b service nova] [instance: 
f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Received unexpected event network-vif-plugged-ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a for instance with vm_state building and task_state spawning. [ 669.326500] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449590, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.540114] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92f88a1-46d1-4994-8144-4462599c9708 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.549177] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54bc7081-8af7-41dc-a74b-124ad018f82e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.584568] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2a805e-e0b2-45ce-8b64-2fa6b9621547 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.596971] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ce4039-e8d3-4bef-9422-679cad520115 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.601024] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449587, 'name': CreateVM_Task, 'duration_secs': 1.654793} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.601215] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 669.602131] env[65503]: WARNING neutronclient.v2_0.client [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
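
The lock traffic threaded through this section ('Acquiring lock "compute_resources" ...', 'acquired by ... :: waited 0.001s', '"released" ... :: held 1.023s', and the '[datastore1] devstack-image-cache_base/...' image-cache locks) comes from oslo.concurrency. A minimal sketch of the two idioms involved, with placeholder lock names and empty bodies:

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs with the named lock held; the synchronized wrapper logs the
        # acquire ("acquired by ... waited N.NNNs") and the release
        # ('"released" by ... held N.NNNs').
        pass


    def fetch_cached_image(image_id):
        # The context-manager form logs plain Acquiring/Acquired/Releasing
        # lines; the lock name here mirrors the per-image cache convention
        # seen above and is a placeholder, not Nova code.
        with lockutils.lock('devstack-image-cache_base/%s' % image_id):
            pass  # fetch or reuse the cached VMDK while the lock is held

The decorator form is what reports the waited/held timings from its inner() wrapper (lockutils.py:405/410/424 in the entries above); the context-manager form produces the plain Acquiring/Acquired/Releasing lines (lockutils.py:313/316/334).
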
[ 669.602503] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.602665] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.602974] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 669.603355] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6498971e-951b-4aea-a03a-a4b4f6682d3c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.614735] env[65503]: DEBUG nova.compute.provider_tree [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.621516] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 669.621516] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528f97e7-64fa-cf45-0839-e29eae653565" [ 669.621516] env[65503]: _type = "Task" [ 669.621516] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.634454] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528f97e7-64fa-cf45-0839-e29eae653565, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.713200] env[65503]: DEBUG nova.compute.manager [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 669.824187] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449590, 'name': CreateVM_Task, 'duration_secs': 0.69717} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.824487] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 669.824977] env[65503]: WARNING neutronclient.v2_0.client [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 669.825357] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.118564] env[65503]: DEBUG nova.scheduler.client.report [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 670.134120] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528f97e7-64fa-cf45-0839-e29eae653565, 'name': SearchDatastore_Task, 'duration_secs': 0.013114} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.134452] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.134680] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 670.134912] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.135065] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.135244] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 670.135552] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.135870] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 670.136126] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-522be7f2-6191-4dcf-83db-e6e457b260f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.139809] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd7b060b-af38-4bd0-9113-ed3edc260c76 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
670.149110] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for the task: (returnval){ [ 670.149110] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f0b396-7a06-7a05-3638-1c4f5f2b0a58" [ 670.149110] env[65503]: _type = "Task" [ 670.149110] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.154785] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 670.155090] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 670.160407] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e7d381e-caf4-4e13-bc99-56e5dde2a23c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.163420] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f0b396-7a06-7a05-3638-1c4f5f2b0a58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.168741] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 670.168741] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a83c0e-a78e-024b-2d3b-b3de50d65e9b" [ 670.168741] env[65503]: _type = "Task" [ 670.168741] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.178686] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a83c0e-a78e-024b-2d3b-b3de50d65e9b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.241907] env[65503]: DEBUG oslo_concurrency.lockutils [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.627088] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.627728] env[65503]: DEBUG nova.compute.manager [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 670.630942] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.405s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.632442] env[65503]: INFO nova.compute.claims [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.661230] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f0b396-7a06-7a05-3638-1c4f5f2b0a58, 'name': SearchDatastore_Task, 'duration_secs': 0.015798} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.661567] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.661950] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 670.662143] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.680459] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a83c0e-a78e-024b-2d3b-b3de50d65e9b, 'name': SearchDatastore_Task, 'duration_secs': 0.018982} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.682588] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd3e3917-3931-4f5e-919f-1c4e0e5cbad3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.690686] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 670.690686] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528818f5-1185-8a51-8f6f-2cbb3a17f3c7" [ 670.690686] env[65503]: _type = "Task" [ 670.690686] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.702584] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528818f5-1185-8a51-8f6f-2cbb3a17f3c7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.832382] env[65503]: DEBUG nova.network.neutron [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Successfully updated port: f78a3ed0-4f14-45aa-afd6-01ab10698376 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 671.138298] env[65503]: DEBUG nova.compute.utils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 671.143324] env[65503]: DEBUG nova.compute.manager [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 671.143558] env[65503]: DEBUG nova.network.neutron [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 671.143895] env[65503]: WARNING neutronclient.v2_0.client [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 671.146821] env[65503]: WARNING neutronclient.v2_0.client [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 671.147797] env[65503]: WARNING openstack [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 671.147851] env[65503]: WARNING openstack [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 671.208280] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528818f5-1185-8a51-8f6f-2cbb3a17f3c7, 'name': SearchDatastore_Task, 'duration_secs': 0.013395} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.208280] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.208542] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 8c274097-234a-44be-9159-c2fb0f1a8da1/8c274097-234a-44be-9159-c2fb0f1a8da1.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 671.208851] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.209121] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 671.209384] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d64a5083-87f7-4b61-a6e5-1505892b877c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.211982] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05c6f588-32fa-4963-90e9-d959d7783708 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.220912] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 671.220912] env[65503]: value = "task-4449591" [ 671.220912] env[65503]: _type = "Task" [ 671.220912] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.225736] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 671.225968] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 671.228069] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d07999f3-5bb6-4bd1-8099-3bc7e7307466 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.234432] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449591, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.243317] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for the task: (returnval){ [ 671.243317] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5229d60c-fbca-8ed4-26f9-c719173bbfb9" [ 671.243317] env[65503]: _type = "Task" [ 671.243317] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.250927] env[65503]: DEBUG nova.policy [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '743114ca4a8e4f2ea902e9c70a79350b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e2d1ec0c216480d83d3ce770375d481', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 671.260316] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5229d60c-fbca-8ed4-26f9-c719173bbfb9, 'name': SearchDatastore_Task, 'duration_secs': 0.011975} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.261245] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70ae90cd-343c-4bd3-ab3e-6dc73f8a73dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.268781] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for the task: (returnval){ [ 671.268781] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52505fd5-6e83-17f6-4537-2647c9d26a50" [ 671.268781] env[65503]: _type = "Task" [ 671.268781] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.287577] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52505fd5-6e83-17f6-4537-2647c9d26a50, 'name': SearchDatastore_Task, 'duration_secs': 0.011283} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.287577] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.287577] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] f7b81948-c480-47a4-9d0f-5c2c163bd7f2/f7b81948-c480-47a4-9d0f-5c2c163bd7f2.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 671.287577] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab7daaea-d42a-4912-8b25-6dc0f9204742 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.294824] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for the task: (returnval){ [ 671.294824] env[65503]: value = "task-4449592" [ 671.294824] env[65503]: _type = "Task" [ 671.294824] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.304881] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449592, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.646386] env[65503]: DEBUG nova.compute.manager [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 671.734382] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449591, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50984} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.734647] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 8c274097-234a-44be-9159-c2fb0f1a8da1/8c274097-234a-44be-9159-c2fb0f1a8da1.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 671.734878] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 671.735298] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8aa0612a-31e0-42ab-b280-a4aed29c500c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.743910] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 671.743910] env[65503]: value = "task-4449593" [ 671.743910] env[65503]: _type = "Task" [ 671.743910] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.768347] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.808301] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.808697] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.831619] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449592, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.879939] env[65503]: DEBUG nova.network.neutron [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Successfully created port: fc1177b8-43dc-4887-a8c1-5f9ebadba2be {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 672.243181] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522b1728-2ed5-9699-1a38-aa5630fd3f7a/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 672.244176] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e57005-48f1-40f8-9507-0d3fda27a00c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.256064] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522b1728-2ed5-9699-1a38-aa5630fd3f7a/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 672.256064] env[65503]: ERROR oslo_vmware.rw_handles [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522b1728-2ed5-9699-1a38-aa5630fd3f7a/disk-0.vmdk due to incomplete transfer. [ 672.256064] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5200f554-be0e-45cc-bca4-390c50bb2eea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.261073] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.363875} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.261750] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 672.262666] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c674965-4378-48a5-99a8-3213ebd38b03 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.269637] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522b1728-2ed5-9699-1a38-aa5630fd3f7a/disk-0.vmdk. {{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 672.269815] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Uploaded image 8257b445-480c-402b-8269-1eadd665a26f to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 672.273858] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 672.281846] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-622d94f6-a635-4e44-aee6-8b8069eb4b0b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.295027] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 8c274097-234a-44be-9159-c2fb0f1a8da1/8c274097-234a-44be-9159-c2fb0f1a8da1.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 672.296496] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a28cd8f4-12b7-438b-b5a9-33befb0912ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.318365] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 672.318365] env[65503]: value = "task-4449594" [ 672.318365] env[65503]: _type = "Task" [ 672.318365] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.322552] env[65503]: DEBUG nova.compute.manager [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 672.326639] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449592, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.809862} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.336026] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] f7b81948-c480-47a4-9d0f-5c2c163bd7f2/f7b81948-c480-47a4-9d0f-5c2c163bd7f2.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 672.336317] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 672.337915] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 672.337915] env[65503]: value = "task-4449595" [ 672.337915] env[65503]: _type = "Task" [ 672.337915] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.337915] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68a19550-c728-48e3-acbe-8f6b55554789 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.347098] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449594, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.358148] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58386684-e090-4a48-b2ae-cdc521c4351c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.361019] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449595, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.361419] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for the task: (returnval){ [ 672.361419] env[65503]: value = "task-4449596" [ 672.361419] env[65503]: _type = "Task" [ 672.361419] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.369273] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63d23ee-f8d5-4632-a9e6-ee34f33bb26b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.376598] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449596, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.408594] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad8734a-b636-4a1e-896e-32a237765c02 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.417712] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f62707-8013-449b-9ea8-b7c1955021d9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.434091] env[65503]: DEBUG nova.compute.provider_tree [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 672.661197] env[65503]: DEBUG nova.compute.manager [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 672.687705] env[65503]: DEBUG nova.virt.hardware [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 672.687958] env[65503]: DEBUG nova.virt.hardware [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 672.688508] env[65503]: DEBUG nova.virt.hardware [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 672.688786] env[65503]: DEBUG nova.virt.hardware [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 672.688964] env[65503]: DEBUG nova.virt.hardware [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 672.689128] env[65503]: DEBUG nova.virt.hardware [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 672.689625] env[65503]: DEBUG nova.virt.hardware [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 672.689792] env[65503]: DEBUG nova.virt.hardware [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 672.689954] env[65503]: DEBUG nova.virt.hardware [None 
req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 672.690135] env[65503]: DEBUG nova.virt.hardware [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 672.690305] env[65503]: DEBUG nova.virt.hardware [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 672.691273] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed23070-ea83-42b7-8590-88d743f19550 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.701628] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04d6d14-5294-4de2-9622-ac526bc1930a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.832768] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449594, 'name': Destroy_Task, 'duration_secs': 0.396007} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.833496] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Destroyed the VM [ 672.833855] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 672.834290] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0f22f93e-e846-4ecc-904d-3ea00c86d0fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.848463] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 672.848463] env[65503]: value = "task-4449597" [ 672.848463] env[65503]: _type = "Task" [ 672.848463] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.857261] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449595, 'name': ReconfigVM_Task, 'duration_secs': 0.384718} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.858098] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 8c274097-234a-44be-9159-c2fb0f1a8da1/8c274097-234a-44be-9159-c2fb0f1a8da1.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 672.858920] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c0b0327-822f-4248-a66f-6280fd1eed28 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.866924] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449597, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.871958] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.874761] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 672.874761] env[65503]: value = "task-4449598" [ 672.874761] env[65503]: _type = "Task" [ 672.874761] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.882434] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449596, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.889230] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449598, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.962998] env[65503]: ERROR nova.scheduler.client.report [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [req-bc74624d-d919-4099-a979-a4765502f442] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bc74624d-d919-4099-a979-a4765502f442"}]} [ 673.001181] env[65503]: DEBUG nova.scheduler.client.report [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 673.018794] env[65503]: DEBUG nova.scheduler.client.report [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 673.019236] env[65503]: DEBUG nova.compute.provider_tree [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 673.037540] env[65503]: DEBUG nova.scheduler.client.report [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 673.061513] env[65503]: DEBUG nova.scheduler.client.report [None 
req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 673.106273] env[65503]: DEBUG nova.network.neutron [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Successfully updated port: 3c1610d7-879b-45f7-9a0e-9f341fd61000 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 673.367853] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449597, 'name': RemoveSnapshot_Task} progress is 26%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.383305] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449596, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.818788} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.383975] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 673.385495] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b38b6ea-a8c0-40a5-94cf-5c35b213cf09 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.396095] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449598, 'name': Rename_Task, 'duration_secs': 0.139543} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.397071] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 673.397362] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f77ef86-17d2-4317-b684-967dc31bfaf3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.423252] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] f7b81948-c480-47a4-9d0f-5c2c163bd7f2/f7b81948-c480-47a4-9d0f-5c2c163bd7f2.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 673.424863] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6821c950-a7fa-474a-9830-9c7c615b23ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.441793] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 673.441793] env[65503]: value = "task-4449599" [ 673.441793] env[65503]: _type = "Task" [ 673.441793] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.448304] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for the task: (returnval){ [ 673.448304] env[65503]: value = "task-4449600" [ 673.448304] env[65503]: _type = "Task" [ 673.448304] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.455027] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449599, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.468711] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449600, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.566011] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c39830-e6da-4098-a26a-c5c9f817a9b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.578372] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07cee451-eac1-4bc7-907c-acd94341427e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.615763] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25460e8a-5061-437e-a698-17003b73d4a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.624688] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac40ef4f-9296-4f80-a73b-596438c8ba1f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.640296] env[65503]: DEBUG nova.compute.provider_tree [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 673.863932] env[65503]: DEBUG oslo_vmware.api [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449597, 'name': RemoveSnapshot_Task, 'duration_secs': 0.556028} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.864327] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 673.864466] env[65503]: INFO nova.compute.manager [None req-ca332265-f8a0-496d-a144-2959b4b24d3f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Took 18.11 seconds to snapshot the instance on the hypervisor. [ 673.959434] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449599, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.963376] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449600, 'name': ReconfigVM_Task, 'duration_secs': 0.341407} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.963840] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Reconfigured VM instance instance-00000019 to attach disk [datastore1] f7b81948-c480-47a4-9d0f-5c2c163bd7f2/f7b81948-c480-47a4-9d0f-5c2c163bd7f2.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 673.964558] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0fee6c7-59cf-497c-89a2-ea8556c22ae7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.973595] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for the task: (returnval){ [ 673.973595] env[65503]: value = "task-4449601" [ 673.973595] env[65503]: _type = "Task" [ 673.973595] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.991576] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449601, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.057431] env[65503]: DEBUG nova.network.neutron [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Successfully updated port: fc1177b8-43dc-4887-a8c1-5f9ebadba2be {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 674.181856] env[65503]: DEBUG nova.scheduler.client.report [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 42 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 674.182650] env[65503]: DEBUG nova.compute.provider_tree [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 42 to 43 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 674.182650] env[65503]: DEBUG nova.compute.provider_tree [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 674.453394] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449599, 'name': PowerOnVM_Task} progress is 64%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.484972] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449601, 'name': Rename_Task, 'duration_secs': 0.147021} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.485331] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 674.485629] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bcb72b0-3f70-4f3e-a64f-851b448c0327 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.494308] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for the task: (returnval){ [ 674.494308] env[65503]: value = "task-4449602" [ 674.494308] env[65503]: _type = "Task" [ 674.494308] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.503745] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449602, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.560388] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquiring lock "refresh_cache-3ac287b4-2538-472b-84ac-7fed3c2ffff3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.560560] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquired lock "refresh_cache-3ac287b4-2538-472b-84ac-7fed3c2ffff3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.561654] env[65503]: DEBUG nova.network.neutron [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 674.689123] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.058s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.690219] env[65503]: DEBUG nova.compute.manager [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 674.696408] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 16.516s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.954644] env[65503]: DEBUG oslo_vmware.api [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449599, 'name': PowerOnVM_Task, 'duration_secs': 1.329464} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.955022] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 674.955022] env[65503]: INFO nova.compute.manager [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Took 12.51 seconds to spawn the instance on the hypervisor. [ 674.955022] env[65503]: DEBUG nova.compute.manager [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 674.955945] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ad3157-5730-46cd-8c36-2b402d317bba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.012603] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449602, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.066632] env[65503]: WARNING openstack [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 675.067149] env[65503]: WARNING openstack [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 675.199229] env[65503]: DEBUG nova.compute.utils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 675.204335] env[65503]: DEBUG nova.compute.manager [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 675.204579] env[65503]: DEBUG nova.network.neutron [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 675.205152] env[65503]: WARNING neutronclient.v2_0.client [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 675.205267] env[65503]: WARNING neutronclient.v2_0.client [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 675.205892] env[65503]: WARNING openstack [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 675.206283] env[65503]: WARNING openstack [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 675.474390] env[65503]: INFO nova.compute.manager [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Took 34.40 seconds to build instance. [ 675.509757] env[65503]: DEBUG oslo_vmware.api [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449602, 'name': PowerOnVM_Task, 'duration_secs': 0.565821} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.509757] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 675.509757] env[65503]: INFO nova.compute.manager [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Took 10.34 seconds to spawn the instance on the hypervisor. [ 675.510395] env[65503]: DEBUG nova.compute.manager [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 675.510838] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be35fbaa-4764-4486-a244-06284a3cb240 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.705401] env[65503]: DEBUG nova.compute.manager [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 675.729508] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908d65aa-3f53-48a6-b634-321f1a02a92e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.735877] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961bf3b4-5461-4561-bbef-cb03b993436b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.769878] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68b2708-7d77-4d9a-8c18-734dddb3d6fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.778933] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c1cc7f-78bf-4d33-af77-3002d9f60f0a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.796479] env[65503]: DEBUG nova.compute.provider_tree [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.941087] env[65503]: DEBUG nova.network.neutron [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Successfully updated port: ca808092-6ae6-418d-9074-0f8e4b10289a {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 675.977255] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3107096b-0e29-410f-b6af-81dd24813374 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "8c274097-234a-44be-9159-c2fb0f1a8da1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.917s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.040581] env[65503]: INFO nova.compute.manager [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Took 30.21 seconds to build instance. [ 676.201272] env[65503]: DEBUG nova.compute.manager [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Received event network-changed-f303ecf7-0607-45a0-bad8-c2eb7e30b62c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 676.201463] env[65503]: DEBUG nova.compute.manager [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Refreshing instance network info cache due to event network-changed-f303ecf7-0607-45a0-bad8-c2eb7e30b62c. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 676.201747] env[65503]: DEBUG oslo_concurrency.lockutils [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] Acquiring lock "refresh_cache-8c274097-234a-44be-9159-c2fb0f1a8da1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.201936] env[65503]: DEBUG oslo_concurrency.lockutils [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] Acquired lock "refresh_cache-8c274097-234a-44be-9159-c2fb0f1a8da1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.202107] env[65503]: DEBUG nova.network.neutron [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Refreshing network info cache for port f303ecf7-0607-45a0-bad8-c2eb7e30b62c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 676.299685] env[65503]: DEBUG nova.scheduler.client.report [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 676.318870] env[65503]: DEBUG nova.network.neutron [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 676.447999] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.447999] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquired lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.447999] env[65503]: DEBUG nova.network.neutron [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 676.494691] env[65503]: DEBUG nova.policy [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57d5e5dcf5ab42aebc9fc3b63dea6847', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a1949e411a84b0ab8d3a249bf80f92f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 676.545958] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5d4b7af9-ec42-4ee4-98c8-42f2483e6f24 tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.734s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.560961] env[65503]: WARNING openstack [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 676.561425] env[65503]: WARNING openstack [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 676.705517] env[65503]: WARNING neutronclient.v2_0.client [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] The python binding code in 
neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 676.708016] env[65503]: WARNING openstack [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 676.708016] env[65503]: WARNING openstack [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 676.723487] env[65503]: DEBUG nova.compute.manager [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 676.760665] env[65503]: DEBUG nova.virt.hardware [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 676.761498] env[65503]: DEBUG nova.virt.hardware [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 676.761721] env[65503]: DEBUG nova.virt.hardware [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 676.762234] env[65503]: DEBUG nova.virt.hardware [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 676.762234] env[65503]: DEBUG nova.virt.hardware [None req-051c3e3a-290d-4bbf-93e7-004c366e365e 
tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 676.762427] env[65503]: DEBUG nova.virt.hardware [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 676.762684] env[65503]: DEBUG nova.virt.hardware [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 676.762876] env[65503]: DEBUG nova.virt.hardware [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 676.766021] env[65503]: DEBUG nova.virt.hardware [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 676.766021] env[65503]: DEBUG nova.virt.hardware [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 676.766021] env[65503]: DEBUG nova.virt.hardware [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 676.766021] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822ad479-7946-42ba-ac2c-53f5587a7a25 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.773673] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f26227-b7e2-4f84-aef7-8578d77993f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.953907] env[65503]: WARNING neutronclient.v2_0.client [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 676.954104] env[65503]: WARNING openstack [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 676.954953] env[65503]: WARNING openstack [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 676.964149] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 676.964529] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 676.980444] env[65503]: DEBUG nova.network.neutron [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Successfully created port: 0aa00537-b95b-4252-b80d-90e59542088b {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 677.121914] env[65503]: DEBUG nova.network.neutron [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 677.250257] env[65503]: WARNING openstack [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 677.253044] env[65503]: WARNING openstack [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 677.293302] env[65503]: DEBUG nova.network.neutron [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Updating instance_info_cache with network_info: [{"id": "fc1177b8-43dc-4887-a8c1-5f9ebadba2be", "address": "fa:16:3e:02:96:7c", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc1177b8-43", "ovs_interfaceid": "fc1177b8-43dc-4887-a8c1-5f9ebadba2be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 677.309957] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 677.310414] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 677.318508] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.624s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.324371] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.018s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.326892] env[65503]: INFO nova.compute.claims [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 677.796104] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Releasing lock "refresh_cache-3ac287b4-2538-472b-84ac-7fed3c2ffff3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.796784] env[65503]: DEBUG nova.compute.manager [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Instance network_info: |[{"id": "fc1177b8-43dc-4887-a8c1-5f9ebadba2be", "address": "fa:16:3e:02:96:7c", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc1177b8-43", "ovs_interfaceid": "fc1177b8-43dc-4887-a8c1-5f9ebadba2be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 677.797323] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:96:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc1177b8-43dc-4887-a8c1-5f9ebadba2be', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 677.807965] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Creating folder: Project (3e2d1ec0c216480d83d3ce770375d481). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 677.809035] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65ce968f-3bcb-4ef3-9cba-19618dd1aee4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.823544] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Created folder: Project (3e2d1ec0c216480d83d3ce770375d481) in parent group-v870190. [ 677.823812] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Creating folder: Instances. Parent ref: group-v870267. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 677.824134] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-174ce201-a57c-4985-9128-376cbf35df33 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.842122] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Created folder: Instances in parent group-v870267. [ 677.842404] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 677.843078] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 677.843318] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fe71a3e-5ca3-4eff-b396-7bb5354074a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.867250] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 677.867250] env[65503]: value = "task-4449605" [ 677.867250] env[65503]: _type = "Task" [ 677.867250] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.881465] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449605, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.888930] env[65503]: DEBUG nova.compute.manager [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Received event network-changed-ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 677.889258] env[65503]: DEBUG nova.compute.manager [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Refreshing instance network info cache due to event network-changed-ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 677.889955] env[65503]: DEBUG oslo_concurrency.lockutils [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Acquiring lock "refresh_cache-f7b81948-c480-47a4-9d0f-5c2c163bd7f2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.889955] env[65503]: DEBUG oslo_concurrency.lockutils [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Acquired lock "refresh_cache-f7b81948-c480-47a4-9d0f-5c2c163bd7f2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.889955] env[65503]: DEBUG nova.network.neutron [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Refreshing network info cache for port ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 677.929031] env[65503]: INFO nova.scheduler.client.report [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Deleted allocation for migration b59abcc2-79e0-4676-be84-c5f88601d520 [ 678.214668] env[65503]: DEBUG oslo_concurrency.lockutils [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "24e054d7-7662-47ef-8f69-4738c5ff9548" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.215317] env[65503]: DEBUG oslo_concurrency.lockutils [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "24e054d7-7662-47ef-8f69-4738c5ff9548" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.215543] env[65503]: DEBUG oslo_concurrency.lockutils [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "24e054d7-7662-47ef-8f69-4738c5ff9548-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.215745] env[65503]: DEBUG 
oslo_concurrency.lockutils [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "24e054d7-7662-47ef-8f69-4738c5ff9548-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.215919] env[65503]: DEBUG oslo_concurrency.lockutils [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "24e054d7-7662-47ef-8f69-4738c5ff9548-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.219088] env[65503]: INFO nova.compute.manager [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Terminating instance [ 678.386299] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449605, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.399827] env[65503]: WARNING neutronclient.v2_0.client [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 678.399827] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 678.399827] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 678.437955] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f9d2ed2-1154-459e-a4b3-43dc542e4cb8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "39266117-e82e-48ae-932a-be04b1a7351a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 24.972s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.441529] env[65503]: WARNING neutronclient.v2_0.client [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 678.441659] env[65503]: WARNING openstack [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 678.442181] env[65503]: WARNING openstack [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 678.484266] env[65503]: WARNING neutronclient.v2_0.client [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 678.485195] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 678.485867] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 678.723470] env[65503]: DEBUG nova.compute.manager [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 678.723762] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 678.724939] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551f1e34-d4aa-4e53-86a5-2782ba9c73f9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.734990] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 678.734990] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac17c412-0daf-4d19-965a-97ca433ca5eb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.745776] env[65503]: DEBUG oslo_vmware.api [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 678.745776] env[65503]: value = "task-4449606" [ 678.745776] env[65503]: _type = "Task" [ 678.745776] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.758074] env[65503]: DEBUG oslo_vmware.api [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449606, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.785138] env[65503]: DEBUG nova.network.neutron [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Successfully updated port: 0aa00537-b95b-4252-b80d-90e59542088b {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 678.883929] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449605, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.889602] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5495392e-9f9a-4695-9935-bbb57923e658 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.904716] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-029d7bc3-cd48-4e61-a7f9-a06fb648eee3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.944078] env[65503]: DEBUG nova.compute.manager [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 678.945964] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b061f3d-080a-4880-b5fa-d4b1fdc41383 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.950161] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ce6072-bff9-418f-8d03-cf44205a8e0f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.963073] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0975f85-d1bd-47bc-928f-0acd677305a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.985728] env[65503]: DEBUG nova.compute.provider_tree [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.075291] env[65503]: DEBUG nova.network.neutron [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Updated VIF entry in instance network info cache for port f303ecf7-0607-45a0-bad8-c2eb7e30b62c. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 679.075786] env[65503]: DEBUG nova.network.neutron [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Updating instance_info_cache with network_info: [{"id": "f303ecf7-0607-45a0-bad8-c2eb7e30b62c", "address": "fa:16:3e:d7:46:b0", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf303ecf7-06", "ovs_interfaceid": "f303ecf7-0607-45a0-bad8-c2eb7e30b62c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 679.179891] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 679.179994] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 679.197107] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 679.197649] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 679.258947] env[65503]: DEBUG oslo_vmware.api [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449606, 
'name': PowerOffVM_Task, 'duration_secs': 0.417539} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.259301] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 679.259489] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 679.259954] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f311af51-98cd-4a16-a8b9-3a39c2d2231c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.276023] env[65503]: WARNING neutronclient.v2_0.client [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 679.276283] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 679.276623] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 679.288483] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquiring lock "refresh_cache-80cf5690-8a18-471a-b02f-3b7b9e539c0d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.288649] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquired lock "refresh_cache-80cf5690-8a18-471a-b02f-3b7b9e539c0d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.288820] env[65503]: DEBUG nova.network.neutron [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 679.329851] env[65503]: WARNING neutronclient.v2_0.client 
[None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 679.330466] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 679.330901] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 679.341179] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 679.341464] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 679.341922] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Deleting the datastore file [datastore2] 24e054d7-7662-47ef-8f69-4738c5ff9548 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 679.341922] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-245a4903-f765-405a-adaf-ccf8d9369eab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.348866] env[65503]: DEBUG oslo_vmware.api [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for the task: (returnval){ [ 679.348866] env[65503]: value = "task-4449608" [ 679.348866] env[65503]: _type = "Task" [ 679.348866] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.365283] env[65503]: DEBUG oslo_vmware.api [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449608, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.380487] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449605, 'name': CreateVM_Task, 'duration_secs': 1.481444} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.380674] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 679.381235] env[65503]: WARNING neutronclient.v2_0.client [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 679.381572] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.381730] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.382045] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 679.382311] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bec2597-3166-4e57-b79d-f039e8ebc1bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.388373] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for the task: (returnval){ [ 679.388373] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5239423e-f951-69f6-7e7b-1240dbbf0675" [ 679.388373] env[65503]: _type = "Task" [ 679.388373] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.397262] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5239423e-f951-69f6-7e7b-1240dbbf0675, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.471537] env[65503]: INFO nova.compute.manager [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] instance snapshotting [ 679.479203] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f484e1-204d-4af0-9782-396d5266b44f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.504032] env[65503]: DEBUG nova.scheduler.client.report [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 679.511330] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f00a2d5-c1b7-47f1-b61f-374c37fde33f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.538649] env[65503]: DEBUG nova.network.neutron [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Updated VIF entry in instance network info cache for port ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 679.538649] env[65503]: DEBUG nova.network.neutron [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Updating instance_info_cache with network_info: [{"id": "ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a", "address": "fa:16:3e:b6:e2:b5", "network": {"id": "1f384090-e7f5-4516-8faf-3069a073e0c8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-745014278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eecbcfbbb0904f57939c23ef03418bd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab13f109-bf", "ovs_interfaceid": "ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 679.573179] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 679.573551] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 679.582696] env[65503]: DEBUG oslo_concurrency.lockutils [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] Releasing lock "refresh_cache-8c274097-234a-44be-9159-c2fb0f1a8da1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.582696] env[65503]: DEBUG nova.compute.manager [req-9a3ca9f8-9d43-41f0-acd3-7b6d213e4890 req-cb70ff54-86ee-4118-a3e8-99caa9674cd8 service nova] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Received event network-vif-deleted-88cbb4cd-7967-47a9-9cda-a3456020aefa {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 679.644521] env[65503]: WARNING neutronclient.v2_0.client [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future 
release. [ 679.644915] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 679.645649] env[65503]: WARNING openstack [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 679.739904] env[65503]: DEBUG nova.network.neutron [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Updating instance_info_cache with network_info: [{"id": "f78a3ed0-4f14-45aa-afd6-01ab10698376", "address": "fa:16:3e:6f:e6:a5", "network": {"id": "cbc6bc8b-08b5-45ea-a1f7-9189971bd55d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1544130521", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf78a3ed0-4f", "ovs_interfaceid": "f78a3ed0-4f14-45aa-afd6-01ab10698376", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3c1610d7-879b-45f7-9a0e-9f341fd61000", "address": "fa:16:3e:66:a5:59", "network": {"id": "c298e7f5-c2c4-4a56-981f-07c28bfc1dc0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1910253340", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c1610d7-87", "ovs_interfaceid": "3c1610d7-879b-45f7-9a0e-9f341fd61000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}, {"id": "ca808092-6ae6-418d-9074-0f8e4b10289a", "address": "fa:16:3e:fd:89:ae", "network": {"id": "cbc6bc8b-08b5-45ea-a1f7-9189971bd55d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1544130521", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca808092-6a", "ovs_interfaceid": "ca808092-6ae6-418d-9074-0f8e4b10289a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 679.792470] env[65503]: WARNING openstack [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 679.792979] env[65503]: WARNING openstack [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 679.838842] env[65503]: DEBUG nova.network.neutron [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 679.860048] env[65503]: DEBUG oslo_vmware.api [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449608, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.870117] env[65503]: WARNING openstack [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 679.870549] env[65503]: WARNING openstack [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 679.901285] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5239423e-f951-69f6-7e7b-1240dbbf0675, 'name': SearchDatastore_Task, 'duration_secs': 0.011608} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.901639] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.901832] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 679.902078] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.902219] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.902643] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 679.905909] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c829535-4397-4e04-8f79-e43d1f53ac6e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.916261] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 679.916479] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 679.917773] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f5bf3a6-142d-4eb3-995d-2269b6b98815 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.923694] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for the task: (returnval){ [ 679.923694] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526e57a4-256a-2698-7ac0-591365be5d3f" [ 679.923694] env[65503]: _type = "Task" [ 679.923694] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.939106] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526e57a4-256a-2698-7ac0-591365be5d3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.959296] env[65503]: WARNING neutronclient.v2_0.client [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
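Annotation: the repeated "Waiting for the task" / "_poll_task ... progress is 0%" entries around this point show the VMware driver polling a vCenter task object until it finishes. A minimal sketch of that polling loop, in plain Python with a hypothetical get_task_info() accessor and TaskFailed exception standing in for the real oslo.vmware internals, might look like this (illustrative only, not the library's actual implementation):

import time

class TaskFailed(Exception):
    """Raised when the vCenter task ends in an error state (illustrative only)."""

def wait_for_task(get_task_info, poll_interval=0.5):
    # Poll the task until it reports success or error, logging progress
    # roughly the way the _poll_task entries above do.
    while True:
        info = get_task_info()  # hypothetical accessor returning a dict with state/progress
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        print(f"Task {info.get('name', 'task')} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
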
[ 679.959296] env[65503]: WARNING openstack [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 679.959645] env[65503]: WARNING openstack [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 680.010273] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.011501] env[65503]: DEBUG nova.compute.manager [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 680.017022] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.356s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.017022] env[65503]: DEBUG nova.objects.instance [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lazy-loading 'resources' on Instance uuid 50f11559-b8c7-41a2-aa43-255a28ffa58c {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 680.025139] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 680.025655] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5f689c2f-a1cb-42bc-90ff-0c41c0027a6c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.038618] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 680.038618] env[65503]: value = "task-4449609" [ 680.038618] env[65503]: _type = "Task" [ 680.038618] env[65503]: } to 
complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.047110] env[65503]: DEBUG oslo_concurrency.lockutils [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Releasing lock "refresh_cache-f7b81948-c480-47a4-9d0f-5c2c163bd7f2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.048129] env[65503]: DEBUG nova.compute.manager [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Received event network-vif-deleted-0d0ad262-a7c3-46b2-962a-ae6db1e6279d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 680.048129] env[65503]: DEBUG nova.compute.manager [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received event network-vif-plugged-f78a3ed0-4f14-45aa-afd6-01ab10698376 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 680.048129] env[65503]: DEBUG oslo_concurrency.lockutils [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Acquiring lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.048129] env[65503]: DEBUG oslo_concurrency.lockutils [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.048513] env[65503]: DEBUG oslo_concurrency.lockutils [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.048868] env[65503]: DEBUG nova.compute.manager [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] No waiting events found dispatching network-vif-plugged-f78a3ed0-4f14-45aa-afd6-01ab10698376 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 680.049057] env[65503]: WARNING nova.compute.manager [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received unexpected event network-vif-plugged-f78a3ed0-4f14-45aa-afd6-01ab10698376 for instance with vm_state building and task_state spawning. 
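Annotation: the "-events" lock messages and the "No waiting events found dispatching network-vif-plugged-... / Received unexpected event" pair above reflect Nova's external-event handshake: the compute manager registers the events it expects for an instance, and Neutron's notification pops them; if the notification arrives before anyone is waiting, the pop finds nothing and the warning is logged. A simplified sketch of that prepare/pop pattern (not Nova's actual InstanceEvents class) could be:

import threading

class InstanceEventTracker:
    """Illustrative sketch of the prepare/pop pattern behind the '-events' lock entries above."""

    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}  # instance_uuid -> {event_name: threading.Event}

    def prepare(self, instance_uuid, event_name):
        # Called before an operation that expects e.g. network-vif-plugged-<port>.
        with self._lock:
            ev = threading.Event()
            self._events.setdefault(instance_uuid, {})[event_name] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        # Called when the external event arrives from the network service.
        with self._lock:
            ev = self._events.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            # Nobody is waiting yet: this corresponds to the
            # "Received unexpected event ... vm_state building" warning above.
            return False
        ev.set()
        return True
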
[ 680.049214] env[65503]: DEBUG nova.compute.manager [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received event network-changed-f78a3ed0-4f14-45aa-afd6-01ab10698376 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 680.049356] env[65503]: DEBUG nova.compute.manager [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Refreshing instance network info cache due to event network-changed-f78a3ed0-4f14-45aa-afd6-01ab10698376. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 680.049518] env[65503]: DEBUG oslo_concurrency.lockutils [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Acquiring lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.060128] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449609, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.142602] env[65503]: DEBUG nova.network.neutron [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Updating instance_info_cache with network_info: [{"id": "0aa00537-b95b-4252-b80d-90e59542088b", "address": "fa:16:3e:e4:4d:2b", "network": {"id": "d2bc52b3-2c24-4b80-8693-024d8ed8c87d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1108649423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8a1949e411a84b0ab8d3a249bf80f92f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aa00537-b9", "ovs_interfaceid": "0aa00537-b95b-4252-b80d-90e59542088b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 680.244961] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Releasing lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.245247] env[65503]: DEBUG nova.compute.manager [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 
tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Instance network_info: |[{"id": "f78a3ed0-4f14-45aa-afd6-01ab10698376", "address": "fa:16:3e:6f:e6:a5", "network": {"id": "cbc6bc8b-08b5-45ea-a1f7-9189971bd55d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1544130521", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf78a3ed0-4f", "ovs_interfaceid": "f78a3ed0-4f14-45aa-afd6-01ab10698376", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3c1610d7-879b-45f7-9a0e-9f341fd61000", "address": "fa:16:3e:66:a5:59", "network": {"id": "c298e7f5-c2c4-4a56-981f-07c28bfc1dc0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1910253340", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c1610d7-87", "ovs_interfaceid": "3c1610d7-879b-45f7-9a0e-9f341fd61000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ca808092-6ae6-418d-9074-0f8e4b10289a", "address": "fa:16:3e:fd:89:ae", "network": {"id": "cbc6bc8b-08b5-45ea-a1f7-9189971bd55d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1544130521", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca808092-6a", "ovs_interfaceid": "ca808092-6ae6-418d-9074-0f8e4b10289a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 680.245612] env[65503]: DEBUG oslo_concurrency.lockutils [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Acquired lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.245785] env[65503]: DEBUG nova.network.neutron [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Refreshing network info cache for port f78a3ed0-4f14-45aa-afd6-01ab10698376 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 680.248415] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:e6:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f78a3ed0-4f14-45aa-afd6-01ab10698376', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:a5:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1c797172-a569-458e-aeb0-3f21e589a740', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c1610d7-879b-45f7-9a0e-9f341fd61000', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:89:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca808092-6ae6-418d-9074-0f8e4b10289a', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 680.258666] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Creating folder: Project (ae68676f87fd4edc802b2e4b4917ceec). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 680.259869] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e73662c3-43ba-4867-804c-6294598d75e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.274407] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Created folder: Project (ae68676f87fd4edc802b2e4b4917ceec) in parent group-v870190. [ 680.274407] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Creating folder: Instances. Parent ref: group-v870270. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 680.275672] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d3e43a2-0414-4322-9b3b-24c78e17a8af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.290649] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Created folder: Instances in parent group-v870270. [ 680.290971] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 680.291220] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 680.292380] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2eef509f-532b-4958-b958-3073cefe37dd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.318940] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 680.318940] env[65503]: value = "task-4449612" [ 680.318940] env[65503]: _type = "Task" [ 680.318940] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.329941] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449612, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.361635] env[65503]: DEBUG oslo_vmware.api [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Task: {'id': task-4449608, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.550037} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.362408] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 680.362706] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 680.362841] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 680.363055] env[65503]: INFO nova.compute.manager [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Took 1.64 seconds to destroy the instance on the hypervisor. [ 680.363390] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 680.363659] env[65503]: DEBUG nova.compute.manager [-] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 680.363762] env[65503]: DEBUG nova.network.neutron [-] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 680.364053] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 680.364757] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 680.365118] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 680.409867] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
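Annotation: the recurring "Disabling service 'block-storage' ... no such option valid_interfaces in group [cinder]" warnings come from the SDK probing an oslo.config group for an option that was never registered in this deployment's nova.conf handling. A minimal stand-alone reproduction of that failure mode with oslo.config (group and option names chosen to mirror the warning; this is not the SDK's exact code path) could be:

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))
conf([])  # parse an empty argument list; valid_interfaces is deliberately never registered

try:
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print(exc)  # -> no such option valid_interfaces in group [cinder]
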
[ 680.438997] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526e57a4-256a-2698-7ac0-591365be5d3f, 'name': SearchDatastore_Task, 'duration_secs': 0.023373} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.443782] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf4324a8-5e31-408a-a144-34cee8e2ed83 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.450950] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for the task: (returnval){ [ 680.450950] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a92a4d-a427-3391-0b55-05617ff3e5f1" [ 680.450950] env[65503]: _type = "Task" [ 680.450950] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.461428] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a92a4d-a427-3391-0b55-05617ff3e5f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.519459] env[65503]: DEBUG nova.compute.utils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 680.521226] env[65503]: DEBUG nova.compute.manager [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 680.521352] env[65503]: DEBUG nova.network.neutron [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 680.521813] env[65503]: WARNING neutronclient.v2_0.client [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 680.522230] env[65503]: WARNING neutronclient.v2_0.client [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
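Annotation: lock contention is visible directly in the lockutils lines here, e.g. the 19.356s wait on "compute_resources" versus the 2.686s hold just above. A short, self-contained helper for pulling lock name, wait time, and hold time out of a log in exactly this message format can make such outliers easy to spot:

import re

# Matches lockutils messages of the form seen above:
#   Lock "compute_resources" acquired by "..." :: waited 19.356s
#   Lock "compute_resources" "released" by "..." :: held 2.686s
LOCK_RE = re.compile(
    r'Lock "(?P<name>[^"]+)" .*?:: (?P<kind>waited|held) (?P<secs>[\d.]+)s')

def lock_times(lines):
    """Yield (lock_name, kind, seconds) for each lockutils wait/hold entry."""
    for line in lines:
        m = LOCK_RE.search(line)
        if m:
            yield m.group('name'), m.group('kind'), float(m.group('secs'))

def slow_waits(lines, threshold=5.0):
    """Return (lock_name, seconds) pairs where a caller waited longer than threshold."""
    return [(name, secs) for name, kind, secs in lock_times(lines)
            if kind == 'waited' and secs > threshold]
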
[ 680.523120] env[65503]: WARNING openstack [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 680.523645] env[65503]: WARNING openstack [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 680.550957] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449609, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.646668] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Releasing lock "refresh_cache-80cf5690-8a18-471a-b02f-3b7b9e539c0d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.646668] env[65503]: DEBUG nova.compute.manager [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Instance network_info: |[{"id": "0aa00537-b95b-4252-b80d-90e59542088b", "address": "fa:16:3e:e4:4d:2b", "network": {"id": "d2bc52b3-2c24-4b80-8693-024d8ed8c87d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1108649423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8a1949e411a84b0ab8d3a249bf80f92f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aa00537-b9", "ovs_interfaceid": "0aa00537-b95b-4252-b80d-90e59542088b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 680.647181] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:e4:4d:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '22390021-1742-415d-b442-811550d09927', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0aa00537-b95b-4252-b80d-90e59542088b', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 680.657167] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Creating folder: Project (8a1949e411a84b0ab8d3a249bf80f92f). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 680.660399] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c1aa8e0-f181-42e5-9f58-62bbd9e8b539 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.673555] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Created folder: Project (8a1949e411a84b0ab8d3a249bf80f92f) in parent group-v870190. [ 680.674091] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Creating folder: Instances. Parent ref: group-v870274. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 680.674091] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7cc24c7-3cf3-4659-9060-af047a2b0e34 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.688032] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Created folder: Instances in parent group-v870274. [ 680.689489] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 680.689489] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 680.689489] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03f932e3-823a-4d38-8249-378418a75e46 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.722735] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquiring lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.722735] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.725716] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquiring lock "34008711-b51b-467b-b972-bfda1023d696" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.725716] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Lock "34008711-b51b-467b-b972-bfda1023d696" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.725716] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 680.725716] env[65503]: value = "task-4449615" [ 680.725716] env[65503]: _type = "Task" [ 680.725716] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.740349] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449615, 'name': CreateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.761461] env[65503]: WARNING neutronclient.v2_0.client [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
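Annotation: the network_info blobs logged by update_instance_cache_with_nw_info above share a fixed shape: a list of VIFs, each with a port id, MAC address, a nested network carrying subnets and fixed IPs, and OVS binding details. A small helper that summarises such a structure once it has been parsed into Python objects (field names taken from the entries above; input is assumed to already be a list of dicts) could look like:

def summarize_network_info(network_info):
    """Return one summary dict per VIF from a parsed network_info list,
    using the field layout visible in the cache-update entries above."""
    summary = []
    for vif in network_info:
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        summary.append({
            'port_id': vif['id'],
            'mac': vif['address'],
            'devname': vif.get('devname'),
            'mtu': vif['network']['meta'].get('mtu'),
            'segmentation_id': vif.get('details', {}).get('segmentation_id'),
            'fixed_ips': ips,
        })
    return summary
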
[ 680.761986] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 680.762840] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 680.785424] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f352fed9-d28d-4fbc-8a13-347562020441 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.801235] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2c15a609-7ec8-4bf2-88a5-f9ec9745781e tempest-ServersAdminNegativeTestJSON-2077671623 tempest-ServersAdminNegativeTestJSON-2077671623-project-admin] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Suspending the VM {{(pid=65503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 680.801235] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-56b200c3-ac57-4cbc-b10f-db1f6435173e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.813591] env[65503]: DEBUG oslo_vmware.api [None req-2c15a609-7ec8-4bf2-88a5-f9ec9745781e tempest-ServersAdminNegativeTestJSON-2077671623 tempest-ServersAdminNegativeTestJSON-2077671623-project-admin] Waiting for the task: (returnval){ [ 680.813591] env[65503]: value = "task-4449616" [ 680.813591] env[65503]: _type = "Task" [ 680.813591] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.829620] env[65503]: DEBUG oslo_vmware.api [None req-2c15a609-7ec8-4bf2-88a5-f9ec9745781e tempest-ServersAdminNegativeTestJSON-2077671623 tempest-ServersAdminNegativeTestJSON-2077671623-project-admin] Task: {'id': task-4449616, 'name': SuspendVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.844033] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449612, 'name': CreateVM_Task, 'duration_secs': 0.513849} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.844033] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 680.844033] env[65503]: WARNING neutronclient.v2_0.client [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
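Annotation: the "Policy check for network:attach_external_network failed" entry above is an oslo.policy decision: the caller only holds the reader and member roles, so the privileged rule does not pass and the port is requested without external-network rights. A stripped-down sketch of that kind of check with oslo.policy (the 'role:admin' rule string is assumed here for illustration; Nova's real default for this rule lives in its policy definitions) might be:

from oslo_config import cfg
from oslo_policy import policy

cfg.CONF([])  # parse an empty argument list so the global config object is usable in this sketch
enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(policy.RuleDefault(
    'network:attach_external_network', 'role:admin'))  # assumed rule string

creds = {'roles': ['reader', 'member'],
         'project_id': 'be67f50c5bc447309d4c04f3f2805455'}
# False for a non-admin caller, matching the DEBUG policy line above.
print(enforcer.enforce('network:attach_external_network', {}, creds))
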
[ 680.844033] env[65503]: WARNING neutronclient.v2_0.client [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 680.844033] env[65503]: WARNING neutronclient.v2_0.client [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 680.844033] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.844033] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.844679] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 680.845090] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-132c1927-4126-418f-a546-dc3421473fd2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.852966] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 680.852966] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521f41c8-1ced-7c72-8b46-3b4121b5d97e" [ 680.852966] env[65503]: _type = "Task" [ 680.852966] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.880452] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521f41c8-1ced-7c72-8b46-3b4121b5d97e, 'name': SearchDatastore_Task, 'duration_secs': 0.013973} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.880774] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.881106] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 680.881401] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.887137] env[65503]: DEBUG nova.policy [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b617c5c7508403f9bef0d6b436f541d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be67f50c5bc447309d4c04f3f2805455', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 680.967941] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a92a4d-a427-3391-0b55-05617ff3e5f1, 'name': SearchDatastore_Task, 'duration_secs': 0.011755} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.968528] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.968528] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 3ac287b4-2538-472b-84ac-7fed3c2ffff3/3ac287b4-2538-472b-84ac-7fed3c2ffff3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 680.968913] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.969034] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 680.969330] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23861438-fd16-42f5-b4f5-395d8c00d932 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.976085] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59e94626-4389-4024-b40e-1db087d40fb7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.981079] env[65503]: DEBUG nova.compute.manager [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received event network-vif-plugged-3c1610d7-879b-45f7-9a0e-9f341fd61000 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 680.981079] env[65503]: DEBUG oslo_concurrency.lockutils [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Acquiring lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.981079] env[65503]: DEBUG oslo_concurrency.lockutils [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.981079] 
env[65503]: DEBUG oslo_concurrency.lockutils [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.981079] env[65503]: DEBUG nova.compute.manager [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] No waiting events found dispatching network-vif-plugged-3c1610d7-879b-45f7-9a0e-9f341fd61000 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 680.981079] env[65503]: WARNING nova.compute.manager [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received unexpected event network-vif-plugged-3c1610d7-879b-45f7-9a0e-9f341fd61000 for instance with vm_state building and task_state spawning. [ 680.981079] env[65503]: DEBUG nova.compute.manager [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received event network-changed-3c1610d7-879b-45f7-9a0e-9f341fd61000 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 680.981079] env[65503]: DEBUG nova.compute.manager [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Refreshing instance network info cache due to event network-changed-3c1610d7-879b-45f7-9a0e-9f341fd61000. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 680.981079] env[65503]: DEBUG oslo_concurrency.lockutils [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Acquiring lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.995769] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for the task: (returnval){ [ 680.995769] env[65503]: value = "task-4449617" [ 680.995769] env[65503]: _type = "Task" [ 680.995769] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.005464] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 681.005639] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 681.006999] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9416df33-beae-40ba-bd77-a6a46a131bde {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.014559] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449617, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.028147] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 681.028147] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 681.040554] env[65503]: DEBUG nova.compute.manager [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 681.050732] env[65503]: DEBUG nova.compute.manager [None req-d95e3d6f-ae57-437b-a405-d3d6c3433307 tempest-ServerDiagnosticsV248Test-230372599 tempest-ServerDiagnosticsV248Test-230372599-project-admin] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 681.052210] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 681.052210] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210a8f1-a347-1be9-48ef-d617e250a56c" [ 681.052210] env[65503]: _type = "Task" [ 681.052210] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.061918] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0469e1-696d-4c39-b384-e88e5cf0c076 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.091626] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449609, 'name': CreateSnapshot_Task, 'duration_secs': 0.519445} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.099366] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 681.100069] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210a8f1-a347-1be9-48ef-d617e250a56c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.100069] env[65503]: INFO nova.compute.manager [None req-d95e3d6f-ae57-437b-a405-d3d6c3433307 tempest-ServerDiagnosticsV248Test-230372599 tempest-ServerDiagnosticsV248Test-230372599-project-admin] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Retrieving diagnostics [ 681.110480] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b96e43c-188f-422e-b705-db886dda6419 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.114506] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6acfa6-8f69-4964-94a1-dbb5e7ef211e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.173824] env[65503]: WARNING neutronclient.v2_0.client [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 681.173824] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 681.173824] env[65503]: WARNING openstack [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 681.224559] env[65503]: DEBUG nova.compute.manager [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 681.229927] env[65503]: DEBUG nova.compute.manager [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 681.237145] env[65503]: DEBUG nova.network.neutron [-] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 681.251450] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449615, 'name': CreateVM_Task, 'duration_secs': 0.47901} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.259179] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 681.260143] env[65503]: WARNING neutronclient.v2_0.client [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 681.260534] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.260757] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.261161] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 681.261443] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5883d7a5-63c2-4fdf-8225-b5740eb2fb9b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.271451] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for the task: (returnval){ [ 681.271451] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52dce5f5-2e98-ff36-8663-e5efd4c95c97" [ 681.271451] env[65503]: _type = "Task" [ 681.271451] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.282512] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52dce5f5-2e98-ff36-8663-e5efd4c95c97, 'name': SearchDatastore_Task, 'duration_secs': 0.011459} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.282971] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.283186] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 681.285288] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.331591] env[65503]: DEBUG oslo_vmware.api [None req-2c15a609-7ec8-4bf2-88a5-f9ec9745781e tempest-ServersAdminNegativeTestJSON-2077671623 tempest-ServersAdminNegativeTestJSON-2077671623-project-admin] Task: {'id': task-4449616, 'name': SuspendVM_Task} progress is 58%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.333414] env[65503]: DEBUG nova.network.neutron [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Updated VIF entry in instance network info cache for port f78a3ed0-4f14-45aa-afd6-01ab10698376. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 681.334728] env[65503]: DEBUG nova.network.neutron [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Updating instance_info_cache with network_info: [{"id": "f78a3ed0-4f14-45aa-afd6-01ab10698376", "address": "fa:16:3e:6f:e6:a5", "network": {"id": "cbc6bc8b-08b5-45ea-a1f7-9189971bd55d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1544130521", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf78a3ed0-4f", "ovs_interfaceid": "f78a3ed0-4f14-45aa-afd6-01ab10698376", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3c1610d7-879b-45f7-9a0e-9f341fd61000", "address": "fa:16:3e:66:a5:59", "network": {"id": "c298e7f5-c2c4-4a56-981f-07c28bfc1dc0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1910253340", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c1610d7-87", "ovs_interfaceid": "3c1610d7-879b-45f7-9a0e-9f341fd61000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ca808092-6ae6-418d-9074-0f8e4b10289a", "address": "fa:16:3e:fd:89:ae", "network": {"id": "cbc6bc8b-08b5-45ea-a1f7-9189971bd55d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1544130521", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", 
"segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca808092-6a", "ovs_interfaceid": "ca808092-6ae6-418d-9074-0f8e4b10289a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 681.356723] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ed2c33-606f-4d2a-99a7-8b9aadd1a72e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.366380] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfa786e-5762-46bc-8cf4-1a2fcc0ccec3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.403262] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03298bd7-6a0c-4a52-a001-2b1f40fa5496 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.415257] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d739f826-b042-4b2a-9b55-ed5fe76e86a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.432511] env[65503]: DEBUG nova.compute.provider_tree [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.513792] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449617, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.514945] env[65503]: DEBUG nova.network.neutron [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Successfully created port: f191aa02-3240-4647-9358-ee80ef3eb29d {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 681.582063] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210a8f1-a347-1be9-48ef-d617e250a56c, 'name': SearchDatastore_Task, 'duration_secs': 0.070755} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.583396] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b622b7c-68d5-4822-993d-0682ddcce8d2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.593619] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 681.593619] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f2275a-a5c9-9ce5-420d-654d4a289f1d" [ 681.593619] env[65503]: _type = "Task" [ 681.593619] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.607761] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f2275a-a5c9-9ce5-420d-654d4a289f1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.678270] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 681.679078] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c0f8d299-90bd-4b4e-a1dc-c68865c6a78d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.689922] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Received event network-vif-plugged-fc1177b8-43dc-4887-a8c1-5f9ebadba2be {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 681.689922] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Acquiring lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.689922] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.690191] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.690243] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] No waiting events found dispatching network-vif-plugged-fc1177b8-43dc-4887-a8c1-5f9ebadba2be {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 681.690355] env[65503]: WARNING nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Received unexpected event network-vif-plugged-fc1177b8-43dc-4887-a8c1-5f9ebadba2be for instance with vm_state building and task_state spawning. [ 681.690510] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Received event network-changed-fc1177b8-43dc-4887-a8c1-5f9ebadba2be {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 681.690656] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Refreshing instance network info cache due to event network-changed-fc1177b8-43dc-4887-a8c1-5f9ebadba2be. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 681.690884] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Acquiring lock "refresh_cache-3ac287b4-2538-472b-84ac-7fed3c2ffff3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.691038] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Acquired lock "refresh_cache-3ac287b4-2538-472b-84ac-7fed3c2ffff3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.691380] env[65503]: DEBUG nova.network.neutron [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Refreshing network info cache for port fc1177b8-43dc-4887-a8c1-5f9ebadba2be {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 681.702344] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 681.702344] env[65503]: value = "task-4449618" [ 681.702344] env[65503]: _type = "Task" [ 681.702344] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.718870] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449618, 'name': CloneVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.746741] env[65503]: INFO nova.compute.manager [-] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Took 1.38 seconds to deallocate network for instance. 
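The SearchDatastore_Task, CopyVirtualDisk_Task and CloneVM_Task records above all follow the same oslo.vmware pattern: a vCenter task is started through the session, and wait_for_task() then polls it, which is what produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, assuming placeholder vCenter credentials and datastore paths (the real values come from nova.conf and the image-cache layout; against vCenter the datacenter references would also be passed, which this sketch omits for brevity):

    from oslo_vmware import api as vmware_api

    # Placeholder connection details; the real service reads these from the
    # [vmware] section of nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Start a server-side disk copy, as in the CopyVirtualDisk_Task records
    # above, then block until vCenter reports the task finished.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/<image>/<image>.vmdk',
        destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk')

    # wait_for_task() polls the task object until it reaches a terminal
    # state; the "progress is N%" log lines come from this poll loop.
    task_info = session.wait_for_task(task)
    print(task_info.state)  # 'success' once the copy has completed
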
[ 681.780318] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.783199] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.834087] env[65503]: DEBUG oslo_vmware.api [None req-2c15a609-7ec8-4bf2-88a5-f9ec9745781e tempest-ServersAdminNegativeTestJSON-2077671623 tempest-ServersAdminNegativeTestJSON-2077671623-project-admin] Task: {'id': task-4449616, 'name': SuspendVM_Task, 'duration_secs': 0.972433} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.835513] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2c15a609-7ec8-4bf2-88a5-f9ec9745781e tempest-ServersAdminNegativeTestJSON-2077671623 tempest-ServersAdminNegativeTestJSON-2077671623-project-admin] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Suspended the VM {{(pid=65503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 681.835513] env[65503]: DEBUG nova.compute.manager [None req-2c15a609-7ec8-4bf2-88a5-f9ec9745781e tempest-ServersAdminNegativeTestJSON-2077671623 tempest-ServersAdminNegativeTestJSON-2077671623-project-admin] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 681.836453] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffdc30b-2825-45ac-9c9b-906a71f4074a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.839584] env[65503]: DEBUG oslo_concurrency.lockutils [req-cf99d2fb-a5ed-4325-91ba-5bc41dd98a6f req-26966495-5912-409e-bff1-a203994f4d74 service nova] Releasing lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.840104] env[65503]: DEBUG oslo_concurrency.lockutils [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Acquired lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.841100] env[65503]: DEBUG nova.network.neutron [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Refreshing network info cache for port 3c1610d7-879b-45f7-9a0e-9f341fd61000 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 681.935796] env[65503]: DEBUG nova.scheduler.client.report [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 682.015242] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449617, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.726227} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.015678] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 3ac287b4-2538-472b-84ac-7fed3c2ffff3/3ac287b4-2538-472b-84ac-7fed3c2ffff3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 682.015800] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 682.016759] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1e56e22-8dc3-4b1b-9759-d322e1e2d1f1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.030134] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for the task: (returnval){ [ 682.030134] env[65503]: value = "task-4449619" [ 682.030134] env[65503]: _type = "Task" [ 682.030134] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.043851] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449619, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.063099] env[65503]: DEBUG nova.compute.manager [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 682.109101] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f2275a-a5c9-9ce5-420d-654d4a289f1d, 'name': SearchDatastore_Task, 'duration_secs': 0.057515} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.113288] env[65503]: DEBUG nova.virt.hardware [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 682.113288] env[65503]: DEBUG nova.virt.hardware [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 682.113288] env[65503]: DEBUG nova.virt.hardware [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 682.113554] env[65503]: DEBUG nova.virt.hardware [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 682.113598] env[65503]: DEBUG nova.virt.hardware [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 682.114041] env[65503]: DEBUG nova.virt.hardware [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 682.114367] env[65503]: DEBUG nova.virt.hardware [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 682.114541] env[65503]: DEBUG nova.virt.hardware [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 682.115510] env[65503]: DEBUG nova.virt.hardware [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 682.115565] env[65503]: DEBUG nova.virt.hardware [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 682.116217] env[65503]: DEBUG nova.virt.hardware [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 682.116599] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.116858] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] fd548bd7-b686-43ef-83a7-c40addf8ba75/fd548bd7-b686-43ef-83a7-c40addf8ba75.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 682.119300] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94916787-c70a-49f1-b460-7ce97ae01fbf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.123355] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.123590] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 682.123849] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2920fe21-13b8-4cbb-b049-c3c9204ee003 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.129657] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-054ced3c-666a-43f1-a5c9-6f822c83696f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.139867] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67cc6ed-626c-4f62-8514-641bcd34c242 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.147753] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 682.147753] env[65503]: value = "task-4449620" [ 682.147753] env[65503]: _type = "Task" [ 682.147753] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.150198] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 682.150198] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 682.156496] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad69136a-1be9-459b-a0a3-27ec61761819 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.173635] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for the task: (returnval){ [ 682.173635] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5250f25a-c849-8156-c38f-c21ef8099fad" [ 682.173635] env[65503]: _type = "Task" [ 682.173635] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.177582] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449620, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.187950] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5250f25a-c849-8156-c38f-c21ef8099fad, 'name': SearchDatastore_Task, 'duration_secs': 0.0109} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.188907] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f0cd6c2-6873-4700-87a4-8ab1abb724bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.195481] env[65503]: WARNING neutronclient.v2_0.client [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 682.195711] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 682.197390] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 682.205299] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for the task: (returnval){ [ 682.205299] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526253d8-1758-b460-9398-660158fc6b19" [ 682.205299] env[65503]: _type = "Task" [ 682.205299] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.223730] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449618, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.223967] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526253d8-1758-b460-9398-660158fc6b19, 'name': SearchDatastore_Task, 'duration_secs': 0.011286} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.224859] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.225100] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 80cf5690-8a18-471a-b02f-3b7b9e539c0d/80cf5690-8a18-471a-b02f-3b7b9e539c0d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 682.225672] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45e2fff0-f529-44d6-82d9-56bdc42ee78a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.234306] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for the task: (returnval){ [ 682.234306] env[65503]: value = "task-4449621" [ 682.234306] env[65503]: _type = "Task" [ 682.234306] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.245903] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449621, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.256877] env[65503]: DEBUG oslo_concurrency.lockutils [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.344540] env[65503]: WARNING neutronclient.v2_0.client [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
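The "Acquiring lock", "acquired ... waited N.NNNs" and '"released" ... held N.NNNs' lines throughout this section are emitted by oslo.concurrency's lockutils. A minimal sketch of the two forms that produce them, reusing the image-cache lock name from the records above purely as an illustrative key (the decorated function below is a placeholder, not Nova's actual code):

    from oslo_concurrency import lockutils

    image_id = 'd68ffece-ab91-4610-b535-fa1fb25ade93'

    # Context-manager form: serialises work on a single cached image within
    # this process (the "Acquiring lock" / "Acquired lock" / "Releasing lock"
    # lines above).
    with lockutils.lock('[datastore1] devstack-image-cache_base/' + image_id):
        pass  # e.g. check for, or fetch, the cached VMDK here

    # Decorator form: wraps a function in the same named lock and logs the
    # "acquired by ... waited" / "released by ... held" timings.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # placeholder critical section


    claim_resources()
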
[ 682.345317] env[65503]: WARNING openstack [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 682.345711] env[65503]: WARNING openstack [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 682.444975] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.430s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.448361] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.506s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.448624] env[65503]: DEBUG nova.objects.instance [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lazy-loading 'resources' on Instance uuid 174c806e-c2e8-4064-8800-d4a35c19f5e6 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 682.472537] env[65503]: INFO nova.scheduler.client.report [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Deleted allocations for instance 50f11559-b8c7-41a2-aa43-255a28ffa58c [ 682.546274] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449619, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.23746} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.549507] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 682.551895] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f315e51e-8fcf-477e-8761-9380be76e79b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.587829] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 3ac287b4-2538-472b-84ac-7fed3c2ffff3/3ac287b4-2538-472b-84ac-7fed3c2ffff3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 682.590776] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8df6c22-d527-4068-88bf-dffdad9cde8b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.618272] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for the task: (returnval){ [ 682.618272] env[65503]: value = "task-4449622" [ 682.618272] env[65503]: _type = "Task" [ 682.618272] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.618823] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 682.619499] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 682.641366] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449622, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.664119] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449620, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.720296] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449618, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.751526] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449621, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.802968] env[65503]: WARNING openstack [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 682.803430] env[65503]: WARNING openstack [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 682.837832] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "45a4b511-aa6a-433d-b136-f53686db9575" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.838065] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "45a4b511-aa6a-433d-b136-f53686db9575" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.874487] env[65503]: WARNING neutronclient.v2_0.client [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
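The "Acquiring lock ... by ..." / "Lock ... acquired/released ... held N.NNNs" records interleaved above come from oslo.concurrency's lockutils. A minimal sketch of the two usual forms, with illustrative lock names and a hypothetical function body (not Nova's actual code):

```python
# Assumed illustration of the oslo.concurrency primitives behind the
# 'Acquiring lock ...' / 'Lock ... "released"' DEBUG records above.
from oslo_concurrency import lockutils

# Decorator form: serializes callers on a named lock and logs how long each
# caller waited for and then held it, as with "compute_resources" above.
@lockutils.synchronized('compute_resources')
def update_usage():
    ...  # mutate shared resource-tracker state while holding the lock

# Context-manager form, as used for the per-instance build locks
# (e.g. the "_locked_do_build_and_run_instance" records above).
with lockutils.lock('45a4b511-aa6a-433d-b136-f53686db9575'):
    pass  # build and run the instance while its lock is held
```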
[ 682.875253] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 682.875682] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 682.986390] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c92877f3-47fb-4e3e-9df8-e6fb6d8b8e45 tempest-ServersAdmin275Test-241681051 tempest-ServersAdmin275Test-241681051-project-member] Lock "50f11559-b8c7-41a2-aa43-255a28ffa58c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.882s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.103110] env[65503]: WARNING neutronclient.v2_0.client [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 683.103556] env[65503]: WARNING openstack [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 683.103994] env[65503]: WARNING openstack [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 683.144111] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449622, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.168237] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449620, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581046} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.168626] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] fd548bd7-b686-43ef-83a7-c40addf8ba75/fd548bd7-b686-43ef-83a7-c40addf8ba75.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 683.168936] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 683.169206] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57e0a4b2-7819-4d17-8db1-b5760095255f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.178065] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 683.178065] env[65503]: value = "task-4449623" [ 683.178065] env[65503]: _type = "Task" [ 683.178065] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.191401] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449623, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.220016] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449618, 'name': CloneVM_Task} progress is 95%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.247505] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449621, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.822979} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.250598] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 80cf5690-8a18-471a-b02f-3b7b9e539c0d/80cf5690-8a18-471a-b02f-3b7b9e539c0d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 683.251127] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 683.251686] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33b9fe95-623a-46af-8c4f-f2627e77623e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.270046] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for the task: (returnval){ [ 683.270046] env[65503]: value = "task-4449624" [ 683.270046] env[65503]: _type = "Task" [ 683.270046] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.280395] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449624, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.395708] env[65503]: DEBUG nova.network.neutron [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Successfully updated port: f191aa02-3240-4647-9358-ee80ef3eb29d {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 683.423834] env[65503]: DEBUG nova.network.neutron [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Updated VIF entry in instance network info cache for port fc1177b8-43dc-4887-a8c1-5f9ebadba2be. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 683.424193] env[65503]: DEBUG nova.network.neutron [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Updating instance_info_cache with network_info: [{"id": "fc1177b8-43dc-4887-a8c1-5f9ebadba2be", "address": "fa:16:3e:02:96:7c", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc1177b8-43", "ovs_interfaceid": "fc1177b8-43dc-4887-a8c1-5f9ebadba2be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 683.517139] env[65503]: DEBUG nova.network.neutron [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Updated VIF entry in instance network info cache for port 3c1610d7-879b-45f7-9a0e-9f341fd61000. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 683.518045] env[65503]: DEBUG nova.network.neutron [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Updating instance_info_cache with network_info: [{"id": "f78a3ed0-4f14-45aa-afd6-01ab10698376", "address": "fa:16:3e:6f:e6:a5", "network": {"id": "cbc6bc8b-08b5-45ea-a1f7-9189971bd55d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1544130521", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf78a3ed0-4f", "ovs_interfaceid": "f78a3ed0-4f14-45aa-afd6-01ab10698376", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3c1610d7-879b-45f7-9a0e-9f341fd61000", "address": "fa:16:3e:66:a5:59", "network": {"id": "c298e7f5-c2c4-4a56-981f-07c28bfc1dc0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1910253340", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c1610d7-87", "ovs_interfaceid": "3c1610d7-879b-45f7-9a0e-9f341fd61000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ca808092-6ae6-418d-9074-0f8e4b10289a", "address": "fa:16:3e:fd:89:ae", "network": {"id": "cbc6bc8b-08b5-45ea-a1f7-9189971bd55d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1544130521", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", 
"segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca808092-6a", "ovs_interfaceid": "ca808092-6ae6-418d-9074-0f8e4b10289a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 683.587104] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4455cb16-f710-415a-94f8-eca7f3fa0c68 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.596564] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb728ac4-c2f6-448e-b28c-622b055e408e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.632943] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3ef710-dca3-4f12-b5d4-d25113d350f7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.646058] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fa3fc4-ab04-4c81-a13e-7384f19b2198 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.650465] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449622, 'name': ReconfigVM_Task, 'duration_secs': 0.681893} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.652779] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 3ac287b4-2538-472b-84ac-7fed3c2ffff3/3ac287b4-2538-472b-84ac-7fed3c2ffff3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 683.652779] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94a77130-c1fa-4744-9d24-b8f7f192fc3d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.664176] env[65503]: DEBUG nova.compute.provider_tree [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.666464] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for the task: (returnval){ [ 683.666464] env[65503]: value = "task-4449625" [ 683.666464] env[65503]: _type = "Task" [ 683.666464] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.678120] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449625, 'name': Rename_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.699071] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449623, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095043} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.699370] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 683.700247] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566efc48-072c-431a-bdd4-2d3ca06c99cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.733830] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] fd548bd7-b686-43ef-83a7-c40addf8ba75/fd548bd7-b686-43ef-83a7-c40addf8ba75.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 683.734664] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12fce089-0daf-4718-9984-4a98c1f57c49 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.753761] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449618, 'name': CloneVM_Task, 'duration_secs': 1.634801} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.754513] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Created linked-clone VM from snapshot [ 683.755504] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90dc937-a0fe-4cc8-8052-812c2962567c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.760798] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 683.760798] env[65503]: value = "task-4449626" [ 683.760798] env[65503]: _type = "Task" [ 683.760798] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.768025] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Uploading image 0669c941-b0c9-4f0f-b5e0-cdb44ce63e35 {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 683.780294] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449626, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.790760] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449624, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082632} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.794162] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 683.795538] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62620c09-1d71-4328-8063-c58520750707 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.800755] env[65503]: DEBUG oslo_vmware.rw_handles [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 683.800755] env[65503]: value = "vm-870277" [ 683.800755] env[65503]: _type = "VirtualMachine" [ 683.800755] env[65503]: }. 
{{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 683.801151] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e6cdbc5b-ef74-4aa5-8b78-afec3dbb02e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.823489] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 80cf5690-8a18-471a-b02f-3b7b9e539c0d/80cf5690-8a18-471a-b02f-3b7b9e539c0d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 683.824708] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b975b4f4-57cc-4a02-9168-d14555e8a9c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.844052] env[65503]: DEBUG oslo_vmware.rw_handles [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lease: (returnval){ [ 683.844052] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524c61d7-2722-eeee-105e-3aea4e3c4e09" [ 683.844052] env[65503]: _type = "HttpNfcLease" [ 683.844052] env[65503]: } obtained for exporting VM: (result){ [ 683.844052] env[65503]: value = "vm-870277" [ 683.844052] env[65503]: _type = "VirtualMachine" [ 683.844052] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 683.844396] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the lease: (returnval){ [ 683.844396] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524c61d7-2722-eeee-105e-3aea4e3c4e09" [ 683.844396] env[65503]: _type = "HttpNfcLease" [ 683.844396] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 683.851111] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for the task: (returnval){ [ 683.851111] env[65503]: value = "task-4449628" [ 683.851111] env[65503]: _type = "Task" [ 683.851111] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.853107] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 683.853107] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524c61d7-2722-eeee-105e-3aea4e3c4e09" [ 683.853107] env[65503]: _type = "HttpNfcLease" [ 683.853107] env[65503]: } is ready. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 683.856610] env[65503]: DEBUG oslo_vmware.rw_handles [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 683.856610] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524c61d7-2722-eeee-105e-3aea4e3c4e09" [ 683.856610] env[65503]: _type = "HttpNfcLease" [ 683.856610] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 683.858071] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc5bdb4-6c7c-4f00-9d5f-38c063fa67c1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.872031] env[65503]: DEBUG oslo_vmware.rw_handles [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52525ccf-7796-9398-cf89-7d4cc020b8eb/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 683.872031] env[65503]: DEBUG oslo_vmware.rw_handles [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52525ccf-7796-9398-cf89-7d4cc020b8eb/disk-0.vmdk for reading. {{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 683.873100] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449628, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.931594] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.931793] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.931943] env[65503]: DEBUG nova.network.neutron [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 683.933355] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Releasing lock "refresh_cache-3ac287b4-2538-472b-84ac-7fed3c2ffff3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.933592] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received event network-vif-plugged-ca808092-6ae6-418d-9074-0f8e4b10289a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 683.936565] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Acquiring lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.936565] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.936565] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.936565] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] No waiting events found dispatching network-vif-plugged-ca808092-6ae6-418d-9074-0f8e4b10289a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 683.936565] env[65503]: WARNING 
nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received unexpected event network-vif-plugged-ca808092-6ae6-418d-9074-0f8e4b10289a for instance with vm_state building and task_state spawning. [ 683.936565] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received event network-changed-ca808092-6ae6-418d-9074-0f8e4b10289a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 683.936565] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Refreshing instance network info cache due to event network-changed-ca808092-6ae6-418d-9074-0f8e4b10289a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 683.936565] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Acquiring lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.969933] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3af3b376-27d2-420f-b8c1-7bfad7859184 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.024602] env[65503]: DEBUG oslo_concurrency.lockutils [req-df8bf59c-dc45-4151-af03-3ded0deb5db0 req-a2aff9fb-a462-45bb-ad1b-de220cf5d4f0 service nova] Releasing lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.025133] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Acquired lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.025133] env[65503]: DEBUG nova.network.neutron [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Refreshing network info cache for port ca808092-6ae6-418d-9074-0f8e4b10289a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 684.066763] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquiring lock "ab09cfe5-8257-462b-9ebf-87081d5793ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.066763] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Lock "ab09cfe5-8257-462b-9ebf-87081d5793ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.067917] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquiring lock "ab09cfe5-8257-462b-9ebf-87081d5793ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.067917] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Lock "ab09cfe5-8257-462b-9ebf-87081d5793ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.067917] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Lock "ab09cfe5-8257-462b-9ebf-87081d5793ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.069884] env[65503]: INFO nova.compute.manager [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Terminating instance [ 684.168049] env[65503]: DEBUG nova.scheduler.client.report [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 684.194216] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449625, 'name': Rename_Task, 'duration_secs': 0.161371} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.195349] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 684.195349] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81732913-a5ba-4579-8a25-77987156a03b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.208037] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for the task: (returnval){ [ 684.208037] env[65503]: value = "task-4449629" [ 684.208037] env[65503]: _type = "Task" [ 684.208037] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.217522] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449629, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.274435] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449626, 'name': ReconfigVM_Task, 'duration_secs': 0.297597} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.274724] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Reconfigured VM instance instance-0000001a to attach disk [datastore1] fd548bd7-b686-43ef-83a7-c40addf8ba75/fd548bd7-b686-43ef-83a7-c40addf8ba75.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 684.275513] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-583057f9-3c2b-49c3-878c-813fbc11b1d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.284273] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 684.284273] env[65503]: value = "task-4449630" [ 684.284273] env[65503]: _type = "Task" [ 684.284273] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.296881] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449630, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.364662] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449628, 'name': ReconfigVM_Task, 'duration_secs': 0.334077} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.365151] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 80cf5690-8a18-471a-b02f-3b7b9e539c0d/80cf5690-8a18-471a-b02f-3b7b9e539c0d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 684.365784] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-920f13b0-041f-4c7d-bc21-5dfddfe23c27 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.374687] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for the task: (returnval){ [ 684.374687] env[65503]: value = "task-4449631" [ 684.374687] env[65503]: _type = "Task" [ 684.374687] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.385282] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449631, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.436531] env[65503]: WARNING openstack [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 684.437388] env[65503]: WARNING openstack [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 684.497473] env[65503]: DEBUG nova.network.neutron [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 684.530172] env[65503]: WARNING neutronclient.v2_0.client [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 684.530172] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 684.533324] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 684.541617] env[65503]: WARNING openstack [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 684.541870] env[65503]: WARNING openstack [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 684.577388] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquiring lock "refresh_cache-ab09cfe5-8257-462b-9ebf-87081d5793ac" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.577388] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquired lock "refresh_cache-ab09cfe5-8257-462b-9ebf-87081d5793ac" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.577388] env[65503]: DEBUG nova.network.neutron [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 684.583171] env[65503]: DEBUG nova.compute.manager [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Received event 
network-changed-ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 684.587357] env[65503]: DEBUG nova.compute.manager [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Refreshing instance network info cache due to event network-changed-ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 684.587638] env[65503]: DEBUG oslo_concurrency.lockutils [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] Acquiring lock "refresh_cache-f7b81948-c480-47a4-9d0f-5c2c163bd7f2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.587786] env[65503]: DEBUG oslo_concurrency.lockutils [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] Acquired lock "refresh_cache-f7b81948-c480-47a4-9d0f-5c2c163bd7f2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.588130] env[65503]: DEBUG nova.network.neutron [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Refreshing network info cache for port ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 684.680530] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.229s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.685099] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.370s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.689222] env[65503]: INFO nova.compute.claims [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 684.724528] env[65503]: DEBUG oslo_vmware.api [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449629, 'name': PowerOnVM_Task, 'duration_secs': 0.512293} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.727122] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 684.727122] env[65503]: INFO nova.compute.manager [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Took 12.07 seconds to spawn the instance on the hypervisor. [ 684.727375] env[65503]: DEBUG nova.compute.manager [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 684.728236] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18eefdc0-4d52-4045-82aa-faba1b8f755e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.745191] env[65503]: INFO nova.scheduler.client.report [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted allocations for instance 174c806e-c2e8-4064-8800-d4a35c19f5e6 [ 684.798036] env[65503]: WARNING neutronclient.v2_0.client [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 684.800202] env[65503]: WARNING openstack [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 684.801157] env[65503]: WARNING openstack [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 684.819807] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449630, 'name': Rename_Task, 'duration_secs': 0.190749} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.823029] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 684.823029] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 684.832487] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 684.833706] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cdf59948-ead7-46e5-92cc-54e5f28bed7f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.844869] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 684.844869] env[65503]: value = "task-4449632" [ 684.844869] env[65503]: _type = "Task" [ 684.844869] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.858561] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449632, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.894428] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449631, 'name': Rename_Task, 'duration_secs': 0.261979} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.894428] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 684.896906] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3045646-6877-4c5d-a868-6c5b542c3f0b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.908321] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for the task: (returnval){ [ 684.908321] env[65503]: value = "task-4449633" [ 684.908321] env[65503]: _type = "Task" [ 684.908321] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.925292] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449633, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.048633] env[65503]: DEBUG nova.network.neutron [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Updating instance_info_cache with network_info: [{"id": "f191aa02-3240-4647-9358-ee80ef3eb29d", "address": "fa:16:3e:b7:65:3a", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf191aa02-32", "ovs_interfaceid": "f191aa02-3240-4647-9358-ee80ef3eb29d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 685.079117] env[65503]: WARNING neutronclient.v2_0.client [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
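The PowerOnVM_Task and Rename_Task entries above all follow the same shape: Nova submits a vCenter task, then oslo.vmware polls it, logging "progress is 0% ... 89%" until the final poll reports "completed successfully" with a duration_secs. The following is only a minimal, hypothetical polling loop in that spirit, not oslo.vmware's actual implementation; get_task_progress is an assumed stand-in for the PropertyCollector reads seen in the log.

```python
import time

def wait_for_task(get_task_progress, poll_interval=0.5, timeout=300.0):
    """Poll a long-running task until it finishes.

    get_task_progress is an assumed callable returning (state, progress),
    standing in for the vCenter task property reads recorded above.
    """
    started = time.monotonic()
    deadline = started + timeout
    while True:
        state, progress = get_task_progress()
        if state == "success":
            # corresponds to the "completed successfully ... duration_secs" lines
            return time.monotonic() - started
        if state == "error":
            raise RuntimeError("task failed")
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        # corresponds to the periodic "progress is N%" DEBUG lines
        print(f"progress is {progress}%")
        time.sleep(poll_interval)
```

oslo.vmware wraps this kind of loop in its own retrying machinery; the sketch only shows where the repeated progress lines in the log come from.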
[ 685.082229] env[65503]: WARNING openstack [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 685.082876] env[65503]: WARNING openstack [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 685.100153] env[65503]: WARNING neutronclient.v2_0.client [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 685.102271] env[65503]: WARNING openstack [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 685.102501] env[65503]: WARNING openstack [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 685.113456] env[65503]: DEBUG nova.compute.manager [req-e8c8ce78-d245-4218-a4f9-e66063ef1a92 req-24028fa1-0e1a-43e7-9be3-35e074addb32 service nova] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Received event network-vif-deleted-b9ca30d7-8bbd-483b-bc06-14bf5c43111c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 685.114219] env[65503]: WARNING neutronclient.v2_0.client [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
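The repeated "Disabling service 'block-storage'" and "Disabling service 'key-manager'" warnings all trace back to the same oslo.config error: the SDK asks the [cinder] or [barbican] group for a valid_interfaces option that is not registered in this service's configuration, oslo.config raises NoSuchOptError, and openstacksdk disables that service type rather than guess an endpoint. A minimal, self-contained reproduction of the error itself (the group and option registration below are only illustrative, not Nova's real option set):

```python
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))
# Register some option so the group exists, but deliberately not valid_interfaces.
conf.register_opts([cfg.StrOpt('catalog_info')], group='cinder')
conf([])  # parse an empty command line

try:
    conf.cinder.valid_interfaces  # never registered in this group
except cfg.NoSuchOptError as exc:
    print(exc)  # -> no such option valid_interfaces in group [cinder]
```

The exception text printed here is the same string that openstacksdk embeds in the warnings above.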
[ 685.114925] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 685.115389] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 685.151339] env[65503]: DEBUG nova.network.neutron [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 685.175306] env[65503]: DEBUG nova.compute.manager [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 685.176474] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f030f86-4b42-4d1e-a7e9-72a03371a23f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.257323] env[65503]: INFO nova.compute.manager [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Took 30.56 seconds to build instance. [ 685.263776] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84bfbe07-d8c6-44d7-99df-cdf371c1c4d2 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "174c806e-c2e8-4064-8800-d4a35c19f5e6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.246s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.357867] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449632, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.368647] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquiring lock "a1908e71-31f9-4308-b4d6-7908d3208c5a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.369036] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Lock "a1908e71-31f9-4308-b4d6-7908d3208c5a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.369452] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquiring lock "a1908e71-31f9-4308-b4d6-7908d3208c5a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.369765] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Lock "a1908e71-31f9-4308-b4d6-7908d3208c5a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.369986] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Lock "a1908e71-31f9-4308-b4d6-7908d3208c5a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.372557] env[65503]: INFO nova.compute.manager [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Terminating instance [ 685.424086] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449633, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.554096] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.554649] env[65503]: DEBUG nova.compute.manager [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Instance network_info: |[{"id": "f191aa02-3240-4647-9358-ee80ef3eb29d", "address": "fa:16:3e:b7:65:3a", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf191aa02-32", "ovs_interfaceid": "f191aa02-3240-4647-9358-ee80ef3eb29d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 685.555442] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:65:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f191aa02-3240-4647-9358-ee80ef3eb29d', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.565321] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Creating folder: Project (be67f50c5bc447309d4c04f3f2805455). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.565552] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f77bf1f-6b81-4821-84c6-13b080bfcc56 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.605896] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Created folder: Project (be67f50c5bc447309d4c04f3f2805455) in parent group-v870190. [ 685.605896] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Creating folder: Instances. Parent ref: group-v870278. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.605896] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58fa1070-ec3b-4e78-9afa-09773a799241 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.605896] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Created folder: Instances in parent group-v870278. [ 685.605896] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 685.605896] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 685.605896] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5a2c67c-292a-49ad-9223-b72d69ce68eb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.631476] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.631476] env[65503]: value = "task-4449636" [ 685.631476] env[65503]: _type = "Task" [ 685.631476] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.651017] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449636, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.702220] env[65503]: INFO nova.compute.manager [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] instance snapshotting [ 685.709614] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b84ca5-576d-4470-9e8f-3ca32a7185d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.736798] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9a4e6e-aa6b-494d-9746-5d6a5fb81c28 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.760078] env[65503]: DEBUG nova.network.neutron [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 685.762716] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e112236f-c15e-417f-b1bb-b52bdb60fe1d tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.738s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.812112] env[65503]: DEBUG nova.network.neutron [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Updated VIF entry in instance network info cache for port ca808092-6ae6-418d-9074-0f8e4b10289a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 685.812741] env[65503]: DEBUG nova.network.neutron [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Updating instance_info_cache with network_info: [{"id": "f78a3ed0-4f14-45aa-afd6-01ab10698376", "address": "fa:16:3e:6f:e6:a5", "network": {"id": "cbc6bc8b-08b5-45ea-a1f7-9189971bd55d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1544130521", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf78a3ed0-4f", "ovs_interfaceid": "f78a3ed0-4f14-45aa-afd6-01ab10698376", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3c1610d7-879b-45f7-9a0e-9f341fd61000", "address": "fa:16:3e:66:a5:59", "network": {"id": "c298e7f5-c2c4-4a56-981f-07c28bfc1dc0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1910253340", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c1610d7-87", "ovs_interfaceid": "3c1610d7-879b-45f7-9a0e-9f341fd61000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ca808092-6ae6-418d-9074-0f8e4b10289a", "address": "fa:16:3e:fd:89:ae", "network": {"id": "cbc6bc8b-08b5-45ea-a1f7-9189971bd55d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1544130521", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", 
"segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca808092-6a", "ovs_interfaceid": "ca808092-6ae6-418d-9074-0f8e4b10289a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 685.861968] env[65503]: DEBUG oslo_vmware.api [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449632, 'name': PowerOnVM_Task, 'duration_secs': 0.68927} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.862501] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 685.863253] env[65503]: INFO nova.compute.manager [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Took 16.91 seconds to spawn the instance on the hypervisor. [ 685.863676] env[65503]: DEBUG nova.compute.manager [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 685.864738] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24bf8e47-8147-4e5a-8f13-5e323efe0191 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.877831] env[65503]: DEBUG nova.compute.manager [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 685.878017] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 685.883231] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2df7e3-107f-46a4-868e-b912f5294175 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.891846] env[65503]: WARNING openstack [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 685.893018] env[65503]: WARNING openstack [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 685.907214] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 685.907657] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0186d2a-44ca-45cf-8e88-dd583fb0c390 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.918272] env[65503]: DEBUG oslo_vmware.api [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for the task: (returnval){ [ 685.918272] env[65503]: value = "task-4449637" [ 685.918272] env[65503]: _type = "Task" [ 685.918272] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.928691] env[65503]: DEBUG oslo_vmware.api [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449633, 'name': PowerOnVM_Task, 'duration_secs': 0.619448} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.930993] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 685.931074] env[65503]: INFO nova.compute.manager [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Took 9.21 seconds to spawn the instance on the hypervisor. [ 685.933835] env[65503]: DEBUG nova.compute.manager [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 685.933835] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1d7dff-9057-4e07-817f-5d432ccff4b7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.941295] env[65503]: DEBUG oslo_vmware.api [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449637, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.018350] env[65503]: WARNING neutronclient.v2_0.client [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 686.019597] env[65503]: WARNING openstack [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 686.020691] env[65503]: WARNING openstack [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 686.150092] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449636, 'name': CreateVM_Task, 'duration_secs': 0.504371} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.156237] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 686.156422] env[65503]: WARNING neutronclient.v2_0.client [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 686.156926] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.157165] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.157630] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 686.157904] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bec4d39-091c-4360-8288-99f8b26bc975 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.164812] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 686.164812] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5248a6ab-95ea-4547-33ef-bf6be781c0d5" [ 686.164812] env[65503]: _type = "Task" [ 686.164812] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.180558] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5248a6ab-95ea-4547-33ef-bf6be781c0d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.252323] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 686.255163] env[65503]: DEBUG nova.network.neutron [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Updated VIF entry in instance network info cache for port ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 686.255584] env[65503]: DEBUG nova.network.neutron [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Updating instance_info_cache with network_info: [{"id": "ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a", "address": "fa:16:3e:b6:e2:b5", "network": {"id": "1f384090-e7f5-4516-8faf-3069a073e0c8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-745014278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eecbcfbbb0904f57939c23ef03418bd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab13f109-bf", "ovs_interfaceid": "ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 686.257765] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0c6cf32f-45ce-4cf1-ace5-fdadd1395712 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.263909] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Releasing lock "refresh_cache-ab09cfe5-8257-462b-9ebf-87081d5793ac" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.264117] env[65503]: DEBUG nova.compute.manager [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 686.264719] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 686.266215] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c915b869-bd58-48d7-8c74-b24005f3afad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.271679] env[65503]: DEBUG nova.compute.manager [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 686.278401] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 686.278401] env[65503]: value = "task-4449638" [ 686.278401] env[65503]: _type = "Task" [ 686.278401] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.290676] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 686.290907] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad042d58-5fe5-4f9f-bc23-608cd5c3a5e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.297787] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449638, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.308055] env[65503]: DEBUG oslo_vmware.api [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for the task: (returnval){ [ 686.308055] env[65503]: value = "task-4449639" [ 686.308055] env[65503]: _type = "Task" [ 686.308055] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.324656] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Releasing lock "refresh_cache-fd548bd7-b686-43ef-83a7-c40addf8ba75" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.324783] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Received event network-vif-plugged-0aa00537-b95b-4252-b80d-90e59542088b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 686.325952] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Acquiring lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.326215] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.326406] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.326796] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] No waiting events found dispatching network-vif-plugged-0aa00537-b95b-4252-b80d-90e59542088b {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 686.327120] env[65503]: WARNING nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Received unexpected event network-vif-plugged-0aa00537-b95b-4252-b80d-90e59542088b for instance with vm_state building and task_state spawning. [ 686.327801] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Received event network-changed-0aa00537-b95b-4252-b80d-90e59542088b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 686.327994] env[65503]: DEBUG nova.compute.manager [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Refreshing instance network info cache due to event network-changed-0aa00537-b95b-4252-b80d-90e59542088b. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 686.328321] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Acquiring lock "refresh_cache-80cf5690-8a18-471a-b02f-3b7b9e539c0d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.328759] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Acquired lock "refresh_cache-80cf5690-8a18-471a-b02f-3b7b9e539c0d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.328983] env[65503]: DEBUG nova.network.neutron [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Refreshing network info cache for port 0aa00537-b95b-4252-b80d-90e59542088b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 686.331689] env[65503]: DEBUG oslo_vmware.api [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449639, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.401771] env[65503]: INFO nova.compute.manager [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Took 36.70 seconds to build instance. [ 686.434629] env[65503]: DEBUG oslo_vmware.api [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449637, 'name': PowerOffVM_Task, 'duration_secs': 0.313995} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.436159] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 686.436454] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 686.437480] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6537c0-15f9-4218-8d44-552d8de9d2bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.440552] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-874108fe-1f0c-4b9b-8954-0db444f0a73b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.449026] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bef211c-95ea-4e88-8862-40764e70f97a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.493683] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f458ae45-a646-44f9-b304-c2d0d403dc6b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.497552] env[65503]: INFO nova.compute.manager [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Took 29.30 seconds to build instance. 
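The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines that run through this section are emitted by oslo.concurrency's lockutils, which Nova uses to serialize work on shared state such as "compute_resources" and the per-instance "refresh_cache-<uuid>" entries. A small sketch of the two usual entry points; the lock names are examples taken from the log, and Nova's own wrappers add details (such as fair locking) that are omitted here.

```python
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources():
    # Only one thread at a time runs this body; lockutils logs the
    # "acquired ... waited" / "released ... held" DEBUG lines around it.
    pass

def refresh_cache(instance_uuid):
    # Context-manager form, matching the refresh_cache-<uuid> lock names above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the network info cache for this instance

claim_resources()
refresh_cache('ab09cfe5-8257-462b-9ebf-87081d5793ac')
```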
[ 686.505568] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328d9eb2-af29-4985-bdec-b54cdfae9d67 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.526913] env[65503]: DEBUG nova.compute.provider_tree [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.535940] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 686.536334] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 686.536637] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Deleting the datastore file [datastore2] a1908e71-31f9-4308-b4d6-7908d3208c5a {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 686.537117] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1f296e4-4cfc-4875-8ba6-182d84ad31d3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.548178] env[65503]: DEBUG oslo_vmware.api [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for the task: (returnval){ [ 686.548178] env[65503]: value = "task-4449641" [ 686.548178] env[65503]: _type = "Task" [ 686.548178] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.559814] env[65503]: DEBUG oslo_vmware.api [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449641, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.680935] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5248a6ab-95ea-4547-33ef-bf6be781c0d5, 'name': SearchDatastore_Task, 'duration_secs': 0.015323} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.681361] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.681630] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 686.681974] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.684320] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.684574] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 686.684888] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5afd88c1-2933-4a65-a8b7-b4008e2fb47d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.700728] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.700990] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 686.701936] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31afea71-fca0-4bc4-9220-f9e9fedeec99 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.713889] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 686.713889] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]522dc8ab-4c61-8c45-0cb0-55aeac7c2994" [ 686.713889] env[65503]: _type = "Task" [ 686.713889] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.724606] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522dc8ab-4c61-8c45-0cb0-55aeac7c2994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.764593] env[65503]: DEBUG oslo_concurrency.lockutils [req-00679499-0dec-4b68-a1e0-322cae6b1c6b req-6fbb68c2-ec1f-4a8a-af8b-6a795a05a00c service nova] Releasing lock "refresh_cache-f7b81948-c480-47a4-9d0f-5c2c163bd7f2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.796625] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449638, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.800087] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.821175] env[65503]: DEBUG oslo_vmware.api [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449639, 'name': PowerOffVM_Task, 'duration_secs': 0.128031} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.821175] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 686.821175] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 686.821399] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e8759a1-ee2d-4143-927b-82c4b9bbade9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.836304] env[65503]: WARNING neutronclient.v2_0.client [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 686.838616] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 686.838616] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 686.852126] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 686.852430] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 686.852609] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Deleting the datastore file [datastore2] ab09cfe5-8257-462b-9ebf-87081d5793ac {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 686.852923] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8fafc7a3-fbab-432d-b601-8438d66e89a0 {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.861420] env[65503]: DEBUG oslo_vmware.api [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for the task: (returnval){ [ 686.861420] env[65503]: value = "task-4449643" [ 686.861420] env[65503]: _type = "Task" [ 686.861420] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.873993] env[65503]: DEBUG oslo_vmware.api [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449643, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.909149] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5730be0f-6715-4469-bc39-4f19f96a5650 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.550s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.000678] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 687.000678] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 687.007136] env[65503]: DEBUG oslo_concurrency.lockutils [None req-051c3e3a-290d-4bbf-93e7-004c366e365e tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.819s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.032854] env[65503]: DEBUG nova.scheduler.client.report [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 687.068502] env[65503]: DEBUG oslo_vmware.api [None 
req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Task: {'id': task-4449641, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235058} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.069184] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 687.069184] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 687.069259] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 687.069456] env[65503]: INFO nova.compute.manager [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Took 1.19 seconds to destroy the instance on the hypervisor. [ 687.069758] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 687.071063] env[65503]: DEBUG nova.compute.manager [-] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 687.071063] env[65503]: DEBUG nova.network.neutron [-] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 687.071063] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
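Editor's note: the "Acquiring lock ...", "acquired ...", and "released ..." entries around the image-cache VMDK and the compute_resources claims all point at oslo_concurrency/lockutils.py, which serializes workers on a named internal semaphore (or a file lock when external locking is requested). Below is a minimal sketch of the two usage forms only; the lock names are placeholders modelled on the ones in the log, and the bodies are not Nova's actual _fetch_image_if_missing or instance_claim code.

    # Sketch: the context-manager and decorator forms behind the
    # "Acquiring lock ... by ..." / "Lock ... released" entries.
    from oslo_concurrency import lockutils

    # Context-manager form, as used around the cached image VMDK
    # ("<image-id>" is a placeholder for the UUID seen in the log):
    with lockutils.lock("[datastore2] devstack-image-cache_base/<image-id>.vmdk"):
        pass  # only one worker at a time copies or inspects the cached disk

    # Decorator form, as used for the resource tracker's "compute_resources" lock:
    @lockutils.synchronized("compute_resources")
    def instance_claim():
        pass  # claims are serialized so inventory accounting stays consistent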
[ 687.071250] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 687.071580] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 687.085754] env[65503]: WARNING neutronclient.v2_0.client [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 687.086552] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 687.087037] env[65503]: WARNING openstack [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 687.166271] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 687.176095] env[65503]: DEBUG nova.compute.manager [None req-7044f6a0-7e64-4539-a15c-29803c738045 tempest-ServerDiagnosticsTest-1989417394 tempest-ServerDiagnosticsTest-1989417394-project-admin] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 687.177396] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb838139-b9f3-4667-b47e-bcbfb7c617ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.187585] env[65503]: INFO nova.compute.manager [None req-7044f6a0-7e64-4539-a15c-29803c738045 tempest-ServerDiagnosticsTest-1989417394 tempest-ServerDiagnosticsTest-1989417394-project-admin] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Retrieving diagnostics [ 687.188827] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9815637-98f1-406f-9acd-6a1818d8a89c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.227954] env[65503]: DEBUG nova.network.neutron [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Updated VIF entry in instance network info cache for port 0aa00537-b95b-4252-b80d-90e59542088b. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 687.228262] env[65503]: DEBUG nova.network.neutron [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Updating instance_info_cache with network_info: [{"id": "0aa00537-b95b-4252-b80d-90e59542088b", "address": "fa:16:3e:e4:4d:2b", "network": {"id": "d2bc52b3-2c24-4b80-8693-024d8ed8c87d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1108649423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8a1949e411a84b0ab8d3a249bf80f92f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aa00537-b9", "ovs_interfaceid": "0aa00537-b95b-4252-b80d-90e59542088b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 687.241315] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522dc8ab-4c61-8c45-0cb0-55aeac7c2994, 'name': SearchDatastore_Task, 'duration_secs': 0.01469} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.243541] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca914176-037b-4122-9032-52fffdd8ca76 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.252692] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 687.252692] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b8ffb4-0cee-6cc6-ded9-dd9170b04c2c" [ 687.252692] env[65503]: _type = "Task" [ 687.252692] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.264793] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b8ffb4-0cee-6cc6-ded9-dd9170b04c2c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.294423] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449638, 'name': CreateSnapshot_Task, 'duration_secs': 0.793623} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.294708] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 687.295555] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878b6fb6-5fb0-4922-8d2b-d173ad4b1536 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.377091] env[65503]: DEBUG oslo_vmware.api [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Task: {'id': task-4449643, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160543} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.377091] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 687.377453] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 687.377528] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 687.377782] env[65503]: INFO nova.compute.manager [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Took 1.11 seconds to destroy the instance on the hypervisor. [ 687.377980] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 687.378259] env[65503]: DEBUG nova.compute.manager [-] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 687.378356] env[65503]: DEBUG nova.network.neutron [-] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 687.378807] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 687.379415] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 687.379719] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 687.404680] env[65503]: DEBUG nova.network.neutron [-] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 687.405041] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 687.473411] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.473411] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.541135] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.856s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.541943] env[65503]: DEBUG nova.compute.manager [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 687.545997] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 24.191s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.546257] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.546424] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 687.546736] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.684s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.546986] env[65503]: DEBUG nova.objects.instance [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lazy-loading 'resources' on Instance uuid 85d0ed1d-6306-4999-832b-f4e69233fec7 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 687.549248] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4af27e-ff09-444e-81b0-47314b933c83 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.560301] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d12295-a8ac-4eed-8d3b-786c09e07b05 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.582568] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5412aca-77ab-4243-a9fa-afdb6e9337ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.587469] env[65503]: DEBUG nova.compute.manager [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Received event network-vif-plugged-f191aa02-3240-4647-9358-ee80ef3eb29d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 687.587469] env[65503]: DEBUG oslo_concurrency.lockutils [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Acquiring lock "a197b590-1f74-4241-9579-2f2d3bb89a1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.587469] env[65503]: DEBUG oslo_concurrency.lockutils 
[req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Lock "a197b590-1f74-4241-9579-2f2d3bb89a1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.587469] env[65503]: DEBUG oslo_concurrency.lockutils [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Lock "a197b590-1f74-4241-9579-2f2d3bb89a1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.587469] env[65503]: DEBUG nova.compute.manager [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] No waiting events found dispatching network-vif-plugged-f191aa02-3240-4647-9358-ee80ef3eb29d {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 687.587767] env[65503]: WARNING nova.compute.manager [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Received unexpected event network-vif-plugged-f191aa02-3240-4647-9358-ee80ef3eb29d for instance with vm_state building and task_state spawning. [ 687.587767] env[65503]: DEBUG nova.compute.manager [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Received event network-changed-f191aa02-3240-4647-9358-ee80ef3eb29d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 687.587909] env[65503]: DEBUG nova.compute.manager [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Refreshing instance network info cache due to event network-changed-f191aa02-3240-4647-9358-ee80ef3eb29d. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 687.588119] env[65503]: DEBUG oslo_concurrency.lockutils [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Acquiring lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.588300] env[65503]: DEBUG oslo_concurrency.lockutils [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Acquired lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.588472] env[65503]: DEBUG nova.network.neutron [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Refreshing network info cache for port f191aa02-3240-4647-9358-ee80ef3eb29d {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 687.600631] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a113fefa-b7ad-4be0-8e54-d1e692a9f60c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.636946] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178114MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 687.637130] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.731950] env[65503]: DEBUG oslo_concurrency.lockutils [req-97f5b027-34d2-4d9e-8384-6701002d6a5f req-f08534bb-ff48-42db-b93f-23102e0be9af service nova] Releasing lock "refresh_cache-80cf5690-8a18-471a-b02f-3b7b9e539c0d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.770625] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b8ffb4-0cee-6cc6-ded9-dd9170b04c2c, 'name': SearchDatastore_Task, 'duration_secs': 0.017794} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.772596] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.773132] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] a197b590-1f74-4241-9579-2f2d3bb89a1d/a197b590-1f74-4241-9579-2f2d3bb89a1d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 687.773132] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-043a2ce7-7928-4345-98f0-bf19499b54a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.782605] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 687.782605] env[65503]: value = "task-4449644" [ 687.782605] env[65503]: _type = "Task" [ 687.782605] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.793654] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449644, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.819679] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 687.820556] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e65130a0-c0a3-4792-8f33-a565ac8756fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.831363] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 687.831363] env[65503]: value = "task-4449645" [ 687.831363] env[65503]: _type = "Task" [ 687.831363] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.842751] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449645, 'name': CloneVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.908990] env[65503]: DEBUG nova.network.neutron [-] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 687.976187] env[65503]: DEBUG nova.compute.manager [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 688.049332] env[65503]: DEBUG nova.compute.utils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 688.050832] env[65503]: DEBUG nova.compute.manager [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 688.051245] env[65503]: DEBUG nova.network.neutron [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 688.051624] env[65503]: WARNING neutronclient.v2_0.client [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 688.051973] env[65503]: WARNING neutronclient.v2_0.client [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
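Editor's note: the CopyVirtualDisk_Task and CloneVM_Task invocations above follow the same oslo.vmware pattern — call a vSphere *_Task method through the session, then block on the returned task reference until vCenter reports success. The sketch below illustrates that pattern for the disk copy only; session is assumed to be an existing oslo_vmware.api.VMwareAPISession, and the keyword arguments follow the vSphere VirtualDiskManager API as recalled here, not necessarily Nova's exact call in vm_util.copy_virtual_disk.

    # Sketch: invoke a vCenter task and wait on it, mirroring the
    # "Invoking VirtualDiskManager.CopyVirtualDisk_Task" and
    # "Task: {...} completed successfully" entries.
    def copy_cached_disk(session, source_vmdk, dest_vmdk, dc_ref):
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', vdm,
            sourceName=source_vmdk,        # e.g. the cached image VMDK path
            sourceDatacenter=dc_ref,
            destName=dest_vmdk,            # e.g. "[datastore2] <uuid>/<uuid>.vmdk"
            destDatacenter=dc_ref)
        # wait_for_task polls the task's TaskInfo until it succeeds,
        # or raises on error.
        return session.wait_for_task(task)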
[ 688.052599] env[65503]: WARNING openstack [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 688.052972] env[65503]: WARNING openstack [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 688.095091] env[65503]: WARNING neutronclient.v2_0.client [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 688.098029] env[65503]: WARNING openstack [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 688.098029] env[65503]: WARNING openstack [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 688.169842] env[65503]: DEBUG nova.policy [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa5203efa0354baca5354d76cf3365c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf54348a3d0948cfa816cc3746e86806', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 688.276522] env[65503]: DEBUG nova.network.neutron [-] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 688.310708] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449644, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.352065] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449645, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.397648] env[65503]: WARNING openstack [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 688.398304] env[65503]: WARNING openstack [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 688.411860] env[65503]: INFO nova.compute.manager [-] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Took 1.03 seconds to deallocate network for instance. [ 688.508268] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.544732] env[65503]: WARNING neutronclient.v2_0.client [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 688.545745] env[65503]: WARNING openstack [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 688.546024] env[65503]: WARNING openstack [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 688.562500] env[65503]: DEBUG nova.compute.manager [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 688.787045] env[65503]: DEBUG nova.network.neutron [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Successfully created port: 5a0ecec2-8866-4131-aa0b-e63ba349190f {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 688.793082] env[65503]: INFO nova.compute.manager [-] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Took 1.72 seconds to deallocate network for instance. [ 688.803867] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449644, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631267} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.805039] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] a197b590-1f74-4241-9579-2f2d3bb89a1d/a197b590-1f74-4241-9579-2f2d3bb89a1d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 688.805039] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 688.808079] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5aa61eb1-b671-4524-80ed-33f4ba9b12cf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.819207] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 688.819207] env[65503]: value = "task-4449646" [ 688.819207] env[65503]: _type = "Task" [ 688.819207] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.828779] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449646, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.831504] env[65503]: DEBUG nova.compute.manager [req-cb603ff1-28c7-459c-86b5-58bdf132f372 req-979ca023-42a8-4954-9fd8-9542c6786cb7 service nova] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Received event network-vif-deleted-939876df-2b3d-4723-8926-1187feb4fe37 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 688.856651] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449645, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.858386] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda84d6a-5376-44eb-a72b-6d922b2b5649 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.869429] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a9ccef-c264-4d0b-9bfb-827836ab9655 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.911349] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eadf168-c4fb-4543-b49f-bf323bcd52e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.920972] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2aa16d3-2bed-4979-82eb-a0de2e33b84e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.927234] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.940039] env[65503]: DEBUG nova.compute.provider_tree [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.955856] env[65503]: DEBUG oslo_concurrency.lockutils [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "fd548bd7-b686-43ef-83a7-c40addf8ba75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.956497] env[65503]: DEBUG oslo_concurrency.lockutils [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.956993] env[65503]: DEBUG oslo_concurrency.lockutils [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.957493] env[65503]: DEBUG oslo_concurrency.lockutils [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.958967] env[65503]: DEBUG oslo_concurrency.lockutils [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.962046] env[65503]: INFO nova.compute.manager [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Terminating instance [ 688.992984] env[65503]: DEBUG nova.network.neutron [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Updated VIF entry in instance network info cache for port f191aa02-3240-4647-9358-ee80ef3eb29d. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 688.993546] env[65503]: DEBUG nova.network.neutron [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Updating instance_info_cache with network_info: [{"id": "f191aa02-3240-4647-9358-ee80ef3eb29d", "address": "fa:16:3e:b7:65:3a", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf191aa02-32", "ovs_interfaceid": "f191aa02-3240-4647-9358-ee80ef3eb29d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 689.092441] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquiring lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.092709] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.092921] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquiring lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.093134] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.093336] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be 
tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.095924] env[65503]: INFO nova.compute.manager [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Terminating instance [ 689.306103] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.329029] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449646, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11674} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.329242] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 689.330041] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8eeb0b-318f-4cd1-98b8-dbc3122bc5c4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.359951] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] a197b590-1f74-4241-9579-2f2d3bb89a1d/a197b590-1f74-4241-9579-2f2d3bb89a1d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.361144] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3ba6307-046c-4935-9bcb-61268a4ec518 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.382171] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449645, 'name': CloneVM_Task} progress is 95%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.388177] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 689.388177] env[65503]: value = "task-4449647" [ 689.388177] env[65503]: _type = "Task" [ 689.388177] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.398316] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449647, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.444137] env[65503]: DEBUG nova.scheduler.client.report [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 689.469314] env[65503]: DEBUG nova.compute.manager [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 689.469529] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 689.470914] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df2a4c6-efdf-44bc-a726-15c14a6331f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.484309] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 689.484582] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f3b925c-6217-4a24-b771-a524e248b198 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.496430] env[65503]: DEBUG oslo_vmware.api [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 689.496430] env[65503]: value = "task-4449648" [ 689.496430] env[65503]: _type = "Task" [ 689.496430] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.499058] env[65503]: DEBUG oslo_concurrency.lockutils [req-d88b4169-3b14-4cc6-9a4e-02d2e7d98c60 req-54f4da04-dd07-45f3-b974-95fa19276e47 service nova] Releasing lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.506471] env[65503]: DEBUG oslo_vmware.api [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449648, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.578738] env[65503]: DEBUG nova.compute.manager [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 689.601239] env[65503]: DEBUG nova.compute.manager [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 689.601476] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 689.603224] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5bdd63-b550-4c16-864f-4f6202a4365d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.610694] env[65503]: DEBUG nova.virt.hardware [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 689.611054] env[65503]: DEBUG nova.virt.hardware [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 689.611229] env[65503]: DEBUG nova.virt.hardware [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 689.611373] env[65503]: DEBUG nova.virt.hardware [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 689.611524] env[65503]: DEBUG nova.virt.hardware [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 689.611645] env[65503]: DEBUG nova.virt.hardware [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 689.611851] env[65503]: DEBUG nova.virt.hardware [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 
tempest-ServerDiskConfigTestJSON-898978153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 689.612026] env[65503]: DEBUG nova.virt.hardware [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 689.612204] env[65503]: DEBUG nova.virt.hardware [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 689.612355] env[65503]: DEBUG nova.virt.hardware [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 689.612554] env[65503]: DEBUG nova.virt.hardware [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 689.613939] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b65125-01b7-44df-8ab4-3f8e94cb9a81 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.620737] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 689.621498] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29610e5f-ea6d-4cd9-9271-afdda19995f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.628183] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81821f8d-0a6d-4477-b178-a92b11bf5e50 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.634417] env[65503]: DEBUG oslo_vmware.api [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for the task: (returnval){ [ 689.634417] env[65503]: value = "task-4449649" [ 689.634417] env[65503]: _type = "Task" [ 689.634417] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.654504] env[65503]: DEBUG oslo_vmware.api [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449649, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.847327] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449645, 'name': CloneVM_Task, 'duration_secs': 1.75541} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.847630] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Created linked-clone VM from snapshot [ 689.849323] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7170bd16-27ad-4639-a1fa-20b725d25e96 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.859313] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Uploading image 4f4c9f00-a608-40ff-97e6-7644f89694ed {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 689.890547] env[65503]: DEBUG oslo_vmware.rw_handles [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 689.890547] env[65503]: value = "vm-870282" [ 689.890547] env[65503]: _type = "VirtualMachine" [ 689.890547] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 689.891144] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e89bf420-b302-4954-94a1-66cf2efdb75e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.904429] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449647, 'name': ReconfigVM_Task, 'duration_secs': 0.489688} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.906271] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Reconfigured VM instance instance-0000001d to attach disk [datastore2] a197b590-1f74-4241-9579-2f2d3bb89a1d/a197b590-1f74-4241-9579-2f2d3bb89a1d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 689.907533] env[65503]: DEBUG oslo_vmware.rw_handles [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lease: (returnval){ [ 689.907533] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521f5e75-da10-ea24-0d00-83d25d3ac7c7" [ 689.907533] env[65503]: _type = "HttpNfcLease" [ 689.907533] env[65503]: } obtained for exporting VM: (result){ [ 689.907533] env[65503]: value = "vm-870282" [ 689.907533] env[65503]: _type = "VirtualMachine" [ 689.907533] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 689.907835] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the lease: (returnval){ [ 689.907835] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521f5e75-da10-ea24-0d00-83d25d3ac7c7" [ 689.907835] env[65503]: _type = "HttpNfcLease" [ 689.907835] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 689.907976] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-813dc714-a59d-4741-acb9-c04218a5c8e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.920576] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 689.920576] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521f5e75-da10-ea24-0d00-83d25d3ac7c7" [ 689.920576] env[65503]: _type = "HttpNfcLease" [ 689.920576] env[65503]: } is initializing. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 689.922945] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 689.922945] env[65503]: value = "task-4449651" [ 689.922945] env[65503]: _type = "Task" [ 689.922945] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.934239] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449651, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.952378] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.405s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.955283] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.810s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.955828] env[65503]: DEBUG nova.objects.instance [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Lazy-loading 'resources' on Instance uuid d8d917c1-224c-4773-a911-d09f3f719e1b {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 690.002967] env[65503]: INFO nova.scheduler.client.report [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Deleted allocations for instance 85d0ed1d-6306-4999-832b-f4e69233fec7 [ 690.017285] env[65503]: DEBUG oslo_vmware.api [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449648, 'name': PowerOffVM_Task, 'duration_secs': 0.282301} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.017285] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 690.017285] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 690.017541] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ce7d48a-06c7-437e-ab7d-75b320aa5047 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.150760] env[65503]: DEBUG oslo_vmware.api [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449649, 'name': PowerOffVM_Task, 'duration_secs': 0.204314} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.150954] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 690.151255] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 690.151603] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c704501-d94a-4f6d-83fd-664661499cb9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.171456] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 690.171742] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 690.171970] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Deleting the datastore file [datastore1] fd548bd7-b686-43ef-83a7-c40addf8ba75 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 690.172370] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dae4850a-ef30-4528-abf2-cbd714af5a76 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.184304] env[65503]: DEBUG oslo_vmware.api [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 690.184304] env[65503]: value = "task-4449654" [ 690.184304] env[65503]: _type = "Task" [ 690.184304] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.198618] env[65503]: DEBUG oslo_vmware.api [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449654, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.277123] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 690.277342] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 690.277933] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Deleting the datastore file [datastore1] 3ac287b4-2538-472b-84ac-7fed3c2ffff3 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 690.278271] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd6370cf-8814-4b34-9068-96ecae045242 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.287770] env[65503]: DEBUG oslo_vmware.api [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for the task: (returnval){ [ 690.287770] env[65503]: value = "task-4449655" [ 690.287770] env[65503]: _type = "Task" [ 690.287770] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.299640] env[65503]: DEBUG oslo_vmware.api [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449655, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.419628] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 690.419628] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521f5e75-da10-ea24-0d00-83d25d3ac7c7" [ 690.419628] env[65503]: _type = "HttpNfcLease" [ 690.419628] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 690.419943] env[65503]: DEBUG oslo_vmware.rw_handles [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 690.419943] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521f5e75-da10-ea24-0d00-83d25d3ac7c7" [ 690.419943] env[65503]: _type = "HttpNfcLease" [ 690.419943] env[65503]: }. 
{{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 690.420773] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e75914-7d90-4f6c-9610-fd5ccf4f9837 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.437491] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449651, 'name': Rename_Task, 'duration_secs': 0.169011} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.441957] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 690.442421] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquiring lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.442667] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.442860] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquiring lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.443287] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.443958] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.445811] env[65503]: DEBUG oslo_vmware.rw_handles [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a6e31b-f9b6-1285-0b0a-04368a26f7da/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 690.445987] env[65503]: DEBUG oslo_vmware.rw_handles [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a6e31b-f9b6-1285-0b0a-04368a26f7da/disk-0.vmdk for reading. {{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 690.447643] env[65503]: INFO nova.compute.manager [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Terminating instance [ 690.449591] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-271ff5ad-9338-46e1-9701-dce12cfe0910 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.521029] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 690.521029] env[65503]: value = "task-4449656" [ 690.521029] env[65503]: _type = "Task" [ 690.521029] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.522937] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04fa5626-1185-405c-a2d6-7cc020e558a0 tempest-AttachInterfacesV270Test-354795835 tempest-AttachInterfacesV270Test-354795835-project-member] Lock "85d0ed1d-6306-4999-832b-f4e69233fec7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.704s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.533856] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449656, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.613838] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-43047cd3-c49c-4b99-a32f-a3b8838e0554 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.683448] env[65503]: DEBUG nova.network.neutron [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Successfully updated port: 5a0ecec2-8866-4131-aa0b-e63ba349190f {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 690.698855] env[65503]: DEBUG oslo_vmware.api [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449654, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24432} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.702169] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 690.702533] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 690.702813] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 690.702892] env[65503]: INFO nova.compute.manager [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Took 1.23 seconds to destroy the instance on the hypervisor. [ 690.703093] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 690.703341] env[65503]: DEBUG nova.compute.manager [-] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 690.703435] env[65503]: DEBUG nova.network.neutron [-] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 690.703691] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 690.705433] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 690.705433] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 690.799861] env[65503]: DEBUG oslo_vmware.api [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Task: {'id': task-4449655, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171337} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.799990] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 690.800218] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 690.800334] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 690.800931] env[65503]: INFO nova.compute.manager [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Took 1.20 seconds to destroy the instance on the hypervisor. [ 690.800931] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 690.804469] env[65503]: DEBUG nova.compute.manager [-] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 690.804469] env[65503]: DEBUG nova.network.neutron [-] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 690.804469] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 690.804469] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 690.804702] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 690.811834] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 690.938321] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 690.955892] env[65503]: DEBUG nova.compute.manager [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 690.956020] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 690.956881] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83595cd-a39c-4518-95f2-89be63efa677 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.967280] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 690.967681] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81244fd8-12e1-4aef-8b5b-521f14b12f37 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.980237] env[65503]: DEBUG oslo_vmware.api [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for the task: (returnval){ [ 690.980237] env[65503]: value = "task-4449657" [ 690.980237] env[65503]: _type = "Task" [ 690.980237] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.990480] env[65503]: DEBUG oslo_vmware.api [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449657, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.035664] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449656, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.185941] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "refresh_cache-5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.186278] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "refresh_cache-5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.186549] env[65503]: DEBUG nova.network.neutron [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 691.222739] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d91201-a76f-4dae-836b-5854f6022b3b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.239216] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0afc57-2c47-42c0-8945-be20d4614789 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.281789] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d72b96-28f7-46c8-8ff1-55065e19d12a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.291977] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1131547-df1c-4c3a-9e35-90d554ec4915 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.310747] env[65503]: DEBUG nova.compute.provider_tree [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.491966] env[65503]: DEBUG oslo_vmware.api [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449657, 'name': PowerOffVM_Task, 'duration_secs': 0.274149} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.492439] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 691.493550] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 691.494033] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c692ca6-f297-451b-adaa-f930fb4a4e73 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.536469] env[65503]: DEBUG oslo_vmware.api [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449656, 'name': PowerOnVM_Task, 'duration_secs': 0.589851} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.537410] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 691.537410] env[65503]: INFO nova.compute.manager [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Took 9.47 seconds to spawn the instance on the hypervisor. 
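The CloneVM_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task records above all follow the same invoke-then-poll pattern from oslo.vmware: the driver issues an asynchronous vSphere call through the API session, then wait_for_task() polls the returned task reference, which is what produces the "progress is N%." and "completed successfully." lines. The following is a minimal sketch of that pattern, assuming oslo.vmware's public session API; the connection values and the VM lookup are placeholders for illustration, not values taken from this log, and constructor arguments can vary slightly between oslo.vmware releases.

# Sketch only: placeholder credentials, not values from this log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.test',            # placeholder vCenter host
    'administrator@vsphere.local',     # placeholder username
    'secret',                          # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

# The repeated "Invoking PropertyCollector.RetrievePropertiesEx" records
# correspond to property-collector reads such as this VM listing.
vms = session.invoke_api(vim_util, 'get_objects', session.vim,
                         'VirtualMachine', 100)

# Asynchronous vSphere calls return a task reference; wait_for_task() polls
# it until the task reaches a terminal state and returns the task info.
vm_ref = vms.objects[0].obj
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)

A task that ends in an error state is generally surfaced as an exception raised by wait_for_task(), which is why the records above only show the success path being handled explicitly.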
[ 691.537410] env[65503]: DEBUG nova.compute.manager [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 691.539106] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc9937d-9d59-4fe3-a05d-22dd9ea52cd5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.578392] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 691.578624] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 691.578970] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Deleting the datastore file [datastore1] 80cf5690-8a18-471a-b02f-3b7b9e539c0d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 691.579821] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f73e070a-0cae-4888-b66f-ad1fb05c499b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.590142] env[65503]: DEBUG oslo_vmware.api [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for the task: (returnval){ [ 691.590142] env[65503]: value = "task-4449659" [ 691.590142] env[65503]: _type = "Task" [ 691.590142] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.601836] env[65503]: DEBUG oslo_vmware.api [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449659, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.691221] env[65503]: WARNING openstack [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 691.691986] env[65503]: WARNING openstack [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 691.800591] env[65503]: DEBUG nova.network.neutron [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 691.816333] env[65503]: DEBUG nova.scheduler.client.report [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 691.824915] env[65503]: DEBUG nova.compute.manager [req-838499f7-6052-4c81-aa5a-cefa12a4f0a0 req-7134bb6f-1153-46ce-84a5-358dc138fb09 service nova] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Received event network-vif-plugged-5a0ecec2-8866-4131-aa0b-e63ba349190f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 691.825174] env[65503]: DEBUG oslo_concurrency.lockutils [req-838499f7-6052-4c81-aa5a-cefa12a4f0a0 req-7134bb6f-1153-46ce-84a5-358dc138fb09 service nova] Acquiring lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.828545] env[65503]: DEBUG oslo_concurrency.lockutils [req-838499f7-6052-4c81-aa5a-cefa12a4f0a0 req-7134bb6f-1153-46ce-84a5-358dc138fb09 service nova] Lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.828545] env[65503]: DEBUG oslo_concurrency.lockutils [req-838499f7-6052-4c81-aa5a-cefa12a4f0a0 req-7134bb6f-1153-46ce-84a5-358dc138fb09 service nova] Lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.828545] env[65503]: DEBUG nova.compute.manager [req-838499f7-6052-4c81-aa5a-cefa12a4f0a0 req-7134bb6f-1153-46ce-84a5-358dc138fb09 service nova] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] No waiting events found dispatching network-vif-plugged-5a0ecec2-8866-4131-aa0b-e63ba349190f {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 691.828545] env[65503]: WARNING nova.compute.manager [req-838499f7-6052-4c81-aa5a-cefa12a4f0a0 req-7134bb6f-1153-46ce-84a5-358dc138fb09 service nova] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Received unexpected event network-vif-plugged-5a0ecec2-8866-4131-aa0b-e63ba349190f for instance with vm_state building and task_state spawning. [ 691.834598] env[65503]: WARNING openstack [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 691.834598] env[65503]: WARNING openstack [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 691.938275] env[65503]: DEBUG nova.network.neutron [-] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 691.948580] env[65503]: WARNING neutronclient.v2_0.client [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 691.949779] env[65503]: WARNING openstack [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 691.950435] env[65503]: WARNING openstack [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 692.063310] env[65503]: INFO nova.compute.manager [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Took 32.82 seconds to build instance. [ 692.102544] env[65503]: DEBUG oslo_vmware.api [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Task: {'id': task-4449659, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270862} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.102924] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 692.103027] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 692.103249] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 692.103545] env[65503]: INFO nova.compute.manager [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 692.103709] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 692.103855] env[65503]: DEBUG nova.compute.manager [-] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 692.104035] env[65503]: DEBUG nova.network.neutron [-] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 692.104166] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 692.104725] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 692.104985] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 692.278753] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 692.319420] env[65503]: DEBUG nova.network.neutron [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Updating instance_info_cache with network_info: [{"id": "5a0ecec2-8866-4131-aa0b-e63ba349190f", "address": "fa:16:3e:0b:29:6d", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a0ecec2-88", "ovs_interfaceid": "5a0ecec2-8866-4131-aa0b-e63ba349190f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 692.329582] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.374s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.333875] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.699s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.336538] env[65503]: INFO nova.compute.claims [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 692.364938] env[65503]: INFO nova.scheduler.client.report [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Deleted allocations for instance d8d917c1-224c-4773-a911-d09f3f719e1b [ 692.444316] env[65503]: INFO nova.compute.manager [-] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Took 1.64 seconds to deallocate network for instance. [ 692.539721] env[65503]: DEBUG nova.network.neutron [-] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 692.567812] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ae2cc264-4c7f-4bb3-92a6-977acd5ed8ae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.351s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.769753] env[65503]: DEBUG oslo_vmware.rw_handles [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52525ccf-7796-9398-cf89-7d4cc020b8eb/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 692.771976] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4954c5ca-e608-4d77-ae34-cb4b5bc32346 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.779816] env[65503]: DEBUG oslo_vmware.rw_handles [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52525ccf-7796-9398-cf89-7d4cc020b8eb/disk-0.vmdk is in state: ready. 
{{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 692.780389] env[65503]: ERROR oslo_vmware.rw_handles [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52525ccf-7796-9398-cf89-7d4cc020b8eb/disk-0.vmdk due to incomplete transfer. [ 692.780822] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-89ff3bd5-4218-4221-8946-8c9c6f687743 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.791538] env[65503]: DEBUG oslo_vmware.rw_handles [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52525ccf-7796-9398-cf89-7d4cc020b8eb/disk-0.vmdk. {{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 692.792277] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Uploaded image 0669c941-b0c9-4f0f-b5e0-cdb44ce63e35 to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 692.794206] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 692.794602] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ed2199fa-c9a7-4859-a5f5-c4f778bfb4a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.806556] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 692.806556] env[65503]: value = "task-4449660" [ 692.806556] env[65503]: _type = "Task" [ 692.806556] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.818769] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449660, 'name': Destroy_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.823558] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "refresh_cache-5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.823558] env[65503]: DEBUG nova.compute.manager [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Instance network_info: |[{"id": "5a0ecec2-8866-4131-aa0b-e63ba349190f", "address": "fa:16:3e:0b:29:6d", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a0ecec2-88", "ovs_interfaceid": "5a0ecec2-8866-4131-aa0b-e63ba349190f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 692.823558] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:29:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a0ecec2-8866-4131-aa0b-e63ba349190f', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 692.832430] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Creating folder: Project (bf54348a3d0948cfa816cc3746e86806). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 692.832780] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d5d70c6-7108-470d-8d7b-a10b4b46d934 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.849588] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Created folder: Project (bf54348a3d0948cfa816cc3746e86806) in parent group-v870190. [ 692.849695] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Creating folder: Instances. Parent ref: group-v870283. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 692.849949] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78995877-16d5-4ba8-859f-ca848e25ffdb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.863028] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Created folder: Instances in parent group-v870283. [ 692.863028] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 692.863289] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 692.863673] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7586ebe7-60d7-4443-8434-898f0a685202 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.887586] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f462c110-e10a-454d-9e21-e005cf7dc88e tempest-ServerDiagnosticsNegativeTest-1397181397 tempest-ServerDiagnosticsNegativeTest-1397181397-project-member] Lock "d8d917c1-224c-4773-a911-d09f3f719e1b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.543s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.891423] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 692.891423] env[65503]: value = "task-4449663" [ 692.891423] env[65503]: _type = "Task" [ 692.891423] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.905280] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449663, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.953837] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.043503] env[65503]: INFO nova.compute.manager [-] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Took 2.34 seconds to deallocate network for instance. [ 693.127077] env[65503]: DEBUG nova.network.neutron [-] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 693.304511] env[65503]: DEBUG nova.compute.manager [req-a06fb43b-bca3-4e60-bdcd-a6e95828f6f9 req-21684cca-2725-45e8-a49c-cec1687303bc service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received event network-vif-deleted-3c1610d7-879b-45f7-9a0e-9f341fd61000 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 693.323777] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449660, 'name': Destroy_Task, 'duration_secs': 0.442025} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.325067] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Destroyed the VM [ 693.325067] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 693.325269] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-68c928af-2e6e-47ce-8c40-15425c5a4016 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.337587] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 693.337587] env[65503]: value = "task-4449664" [ 693.337587] env[65503]: _type = "Task" [ 693.337587] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.352035] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449664, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.403464] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449663, 'name': CreateVM_Task, 'duration_secs': 0.38767} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.406756] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 693.407579] env[65503]: WARNING neutronclient.v2_0.client [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 693.407888] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.408347] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.408347] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 693.408850] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e72e57cf-6d91-4ddf-9206-8fc110620fb2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.416968] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 693.416968] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f20bb9-0c52-3c6f-fb31-5c2a887b4c28" [ 693.416968] env[65503]: _type = "Task" [ 693.416968] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.432040] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f20bb9-0c52-3c6f-fb31-5c2a887b4c28, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.552491] env[65503]: DEBUG oslo_concurrency.lockutils [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.630909] env[65503]: INFO nova.compute.manager [-] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Took 1.53 seconds to deallocate network for instance. [ 693.713702] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.713702] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.854047] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449664, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.932859] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f20bb9-0c52-3c6f-fb31-5c2a887b4c28, 'name': SearchDatastore_Task, 'duration_secs': 0.017933} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.932859] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.933149] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 693.933409] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.933554] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.934192] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.934192] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92415d04-e835-4a36-9173-5f7ab4dc9a4b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.946584] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.946872] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 693.947691] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39dcdefc-f414-44d3-b24b-0645fc6bcfa2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.959733] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 693.959733] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52add53f-9e76-7231-faca-a1783f3a6ead" [ 693.959733] env[65503]: _type = "Task" [ 693.959733] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.974330] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52add53f-9e76-7231-faca-a1783f3a6ead, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.976718] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c8c707-3ef0-4032-8c36-634e881e744a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.986153] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd9a3a3-bf45-433a-ab59-0512fdc1c8fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.026802] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb17ed8e-7aee-4eb5-8bf0-f382fb490f61 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.041071] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0983c9-04d6-4d98-b097-511ae2714cfd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.053933] env[65503]: DEBUG nova.compute.provider_tree [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.144112] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.215886] env[65503]: DEBUG nova.compute.manager [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 694.352228] env[65503]: DEBUG oslo_vmware.api [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449664, 'name': RemoveSnapshot_Task, 'duration_secs': 0.632984} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.352228] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 694.352228] env[65503]: INFO nova.compute.manager [None req-91c79f7c-5f5c-423b-9fd6-1577cb388f5c tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Took 14.87 seconds to snapshot the instance on the hypervisor. [ 694.475051] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52add53f-9e76-7231-faca-a1783f3a6ead, 'name': SearchDatastore_Task, 'duration_secs': 0.01374} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.475964] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e75c69b-c704-4859-a8c9-3fbbabcca48d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.483561] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 694.483561] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b8c6a1-797b-206e-f48b-0a2b3aa6a2b8" [ 694.483561] env[65503]: _type = "Task" [ 694.483561] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.495716] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b8c6a1-797b-206e-f48b-0a2b3aa6a2b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.557524] env[65503]: DEBUG nova.scheduler.client.report [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 694.752741] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.784466] env[65503]: DEBUG nova.compute.manager [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Received event network-changed-5a0ecec2-8866-4131-aa0b-e63ba349190f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 694.784846] env[65503]: DEBUG nova.compute.manager [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Refreshing instance network info cache due to event network-changed-5a0ecec2-8866-4131-aa0b-e63ba349190f. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 694.785210] env[65503]: DEBUG oslo_concurrency.lockutils [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] Acquiring lock "refresh_cache-5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.785430] env[65503]: DEBUG oslo_concurrency.lockutils [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] Acquired lock "refresh_cache-5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.786509] env[65503]: DEBUG nova.network.neutron [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Refreshing network info cache for port 5a0ecec2-8866-4131-aa0b-e63ba349190f {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 694.999482] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b8c6a1-797b-206e-f48b-0a2b3aa6a2b8, 'name': SearchDatastore_Task, 'duration_secs': 0.017224} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.999843] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.999843] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1/5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 695.000186] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9b49804-2f60-4156-906e-a29c29b844c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.010046] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 695.010046] env[65503]: value = "task-4449665" [ 695.010046] env[65503]: _type = "Task" [ 695.010046] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.022963] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449665, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.068217] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.068635] env[65503]: DEBUG nova.compute.manager [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 695.071450] env[65503]: DEBUG oslo_concurrency.lockutils [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.830s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.073436] env[65503]: INFO nova.compute.claims [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 695.292562] env[65503]: WARNING neutronclient.v2_0.client [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 695.293390] env[65503]: WARNING openstack [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 695.293830] env[65503]: WARNING openstack [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 695.527109] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449665, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.585019] env[65503]: DEBUG nova.compute.utils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 695.589278] env[65503]: DEBUG nova.compute.manager [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 695.589485] env[65503]: DEBUG nova.network.neutron [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 695.590379] env[65503]: WARNING neutronclient.v2_0.client [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 695.590379] env[65503]: WARNING neutronclient.v2_0.client [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 695.590763] env[65503]: WARNING openstack [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 695.591192] env[65503]: WARNING openstack [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 695.673692] env[65503]: WARNING openstack [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 695.674191] env[65503]: WARNING openstack [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 695.737982] env[65503]: DEBUG nova.policy [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f22edec0eb89460e83952f6649747eca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8d373b14fc34ee69c50f9f7ce58c888', 
'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 695.923040] env[65503]: WARNING neutronclient.v2_0.client [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 695.923891] env[65503]: WARNING openstack [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 695.924518] env[65503]: WARNING openstack [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 696.034903] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.653332} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.034903] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1/5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 696.034903] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 696.034903] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d4a91bb-2af5-4662-9223-e1f12d56dbae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.035434] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 696.035434] env[65503]: value = "task-4449666" [ 696.035434] env[65503]: _type = "Task" [ 696.035434] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.046436] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449666, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.101724] env[65503]: DEBUG nova.compute.manager [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 696.228481] env[65503]: DEBUG nova.network.neutron [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Successfully created port: 1e1fc8a1-f7e8-49f4-b328-b7f029f59874 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 696.547480] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449666, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079886} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.547760] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 696.548667] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0f37ed-02be-4cfe-b688-b9009837b57d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.581136] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1/5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 696.585089] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7aab6e8-334f-413c-86c5-5ebdab31345a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.614211] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 696.614211] env[65503]: value = "task-4449667" [ 696.614211] env[65503]: _type = "Task" [ 696.614211] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.630357] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449667, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.673198] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1f066a-c18a-4969-a3fa-d73cd2cad4a0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.684714] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d167c18c-66e6-4bcd-acc4-506a5fb624b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.718038] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19ddead-3031-4b4f-8832-568ac294d1b8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.722755] env[65503]: DEBUG nova.network.neutron [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Updated VIF entry in instance network info cache for port 5a0ecec2-8866-4131-aa0b-e63ba349190f. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 696.723130] env[65503]: DEBUG nova.network.neutron [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Updating instance_info_cache with network_info: [{"id": "5a0ecec2-8866-4131-aa0b-e63ba349190f", "address": "fa:16:3e:0b:29:6d", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a0ecec2-88", "ovs_interfaceid": "5a0ecec2-8866-4131-aa0b-e63ba349190f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 696.730696] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbb0bb4-9df7-4dba-a7ee-660fac495562 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.747169] env[65503]: DEBUG nova.compute.provider_tree [None 
req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.119326] env[65503]: DEBUG nova.compute.manager [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 697.135672] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449667, 'name': ReconfigVM_Task, 'duration_secs': 0.349442} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.135968] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1/5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 697.137138] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a2dccaf2-9d90-4b5e-a3ab-1f95c05dff37 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.145134] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 697.145134] env[65503]: value = "task-4449668" [ 697.145134] env[65503]: _type = "Task" [ 697.145134] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.154516] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449668, 'name': Rename_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.162998] env[65503]: DEBUG nova.virt.hardware [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 697.163262] env[65503]: DEBUG nova.virt.hardware [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 697.163421] env[65503]: DEBUG nova.virt.hardware [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 697.163610] env[65503]: DEBUG nova.virt.hardware [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 697.163867] env[65503]: DEBUG nova.virt.hardware [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 697.163945] env[65503]: DEBUG nova.virt.hardware [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 697.164096] env[65503]: DEBUG nova.virt.hardware [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 697.164312] env[65503]: DEBUG nova.virt.hardware [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 
tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 697.164406] env[65503]: DEBUG nova.virt.hardware [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 697.164558] env[65503]: DEBUG nova.virt.hardware [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 697.164721] env[65503]: DEBUG nova.virt.hardware [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 697.165778] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b09c60c-3c90-4a5e-a445-d000fca914c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.176174] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da32836-937f-4fcf-8155-5394dfc547ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.226246] env[65503]: DEBUG oslo_concurrency.lockutils [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] Releasing lock "refresh_cache-5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.226551] env[65503]: DEBUG nova.compute.manager [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Received event network-vif-deleted-fc1177b8-43dc-4887-a8c1-5f9ebadba2be {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 697.226737] env[65503]: DEBUG nova.compute.manager [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received event network-vif-deleted-f78a3ed0-4f14-45aa-afd6-01ab10698376 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 697.226904] env[65503]: DEBUG nova.compute.manager [req-3a91f51f-504d-4f79-b00e-61252e20ec28 req-bace71c5-dd1f-4ead-aeb9-3881d9f53332 service nova] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Received event network-vif-deleted-ca808092-6ae6-418d-9074-0f8e4b10289a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 697.250601] env[65503]: DEBUG nova.scheduler.client.report [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Inventory has not 
changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 697.656939] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449668, 'name': Rename_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.756650] env[65503]: DEBUG oslo_concurrency.lockutils [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.757116] env[65503]: DEBUG nova.compute.manager [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 697.764170] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.892s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.765863] env[65503]: INFO nova.compute.claims [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 698.042750] env[65503]: DEBUG nova.network.neutron [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Successfully updated port: 1e1fc8a1-f7e8-49f4-b328-b7f029f59874 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 698.157560] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449668, 'name': Rename_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.266764] env[65503]: DEBUG nova.compute.utils [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 698.269168] env[65503]: DEBUG nova.compute.manager [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 698.269457] env[65503]: DEBUG nova.network.neutron [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 698.269884] env[65503]: WARNING neutronclient.v2_0.client [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 698.270274] env[65503]: WARNING neutronclient.v2_0.client [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
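The neutronclient deprecation warnings repeated above and throughout this run point at openstacksdk as the replacement binding. As a rough illustration (not code from this deployment), the kind of port query Nova issues through neutronclient could be done with the SDK as sketched below; the cloud name 'devstack' is an assumed clouds.yaml entry, and the instance UUID is copied from this log only as an example filter:

# Sketch: list Neutron ports for one instance via openstacksdk instead of the
# deprecated python-neutronclient bindings.
import openstack

conn = openstack.connect(cloud='devstack')  # assumed clouds.yaml entry name

instance_uuid = '8f0bf665-b21b-42ed-816d-69dee2f40654'  # example from this log
for port in conn.network.ports(device_id=instance_uuid):
    print(port.id, port.mac_address,
          [ip['ip_address'] for ip in port.fixed_ips])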
[ 698.271036] env[65503]: WARNING openstack [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 698.271435] env[65503]: WARNING openstack [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 698.480546] env[65503]: DEBUG nova.compute.manager [req-2cbdb413-6fbc-41b7-8cf5-4721e5a6ab85 req-8071b2b2-a4d0-4f24-b178-feb96e9e5e89 service nova] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Received event network-vif-deleted-0aa00537-b95b-4252-b80d-90e59542088b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 698.546509] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquiring lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.546658] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquired lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.547279] env[65503]: DEBUG nova.network.neutron [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 698.661184] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449668, 'name': Rename_Task, 'duration_secs': 1.167929} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.661450] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 698.661767] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91ea0875-ea6d-4f7d-b649-9e5d1fb46ea6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.670827] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 698.670827] env[65503]: value = "task-4449669" [ 698.670827] env[65503]: _type = "Task" [ 698.670827] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.681352] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449669, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.781985] env[65503]: DEBUG nova.compute.manager [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 698.979669] env[65503]: DEBUG nova.policy [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52823a9fdd97459fa4bc57df3a8cbbc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb3b7254cf72404d805209ff11130a1e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 699.052024] env[65503]: WARNING openstack [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 699.052024] env[65503]: WARNING openstack [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 699.187508] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449669, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.293639] env[65503]: INFO nova.virt.block_device [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Booting with volume 760148f4-1011-4972-a0ed-b18b4fd3f079 at /dev/sda [ 699.339125] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquiring lock "16d508f9-72f0-4853-92fb-c8c7a37b5668" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.339521] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Lock "16d508f9-72f0-4853-92fb-c8c7a37b5668" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.339838] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquiring lock "16d508f9-72f0-4853-92fb-c8c7a37b5668-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.340162] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Lock "16d508f9-72f0-4853-92fb-c8c7a37b5668-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.340427] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Lock "16d508f9-72f0-4853-92fb-c8c7a37b5668-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.343664] env[65503]: INFO nova.compute.manager [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Terminating instance [ 699.360519] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7d5047f-3553-40d5-bf19-e82ccadd54c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.373824] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b635dd-f900-4277-b5e4-eb7eed81a23d {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.417372] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf231b4e-fccf-4603-8d29-3747e074fe48 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.420120] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57474486-65f7-4a3a-91b6-0fb66623bf46 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.430075] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce73488-6310-4f5e-9520-ba3e9f3a6c54 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.437744] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee93a6f-ed26-4731-b0f9-9165f4d5f2b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.497057] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd4af66-8e06-4c30-b035-4ec726e8e490 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.501364] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7809df59-9c3e-4355-9ebd-ac94fb8f6ccc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.511348] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3499f33-c4dd-4a9b-a162-350e7c7ba0be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.515153] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf484a8e-ede8-4310-b359-4d6d9520e98d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.530481] env[65503]: DEBUG nova.compute.provider_tree [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.541914] env[65503]: DEBUG nova.virt.block_device [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Updating existing volume attachment record: 2025efe7-3e5c-4da2-9e8a-70f57815483c {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 699.685830] env[65503]: DEBUG oslo_vmware.api [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449669, 'name': PowerOnVM_Task, 'duration_secs': 0.687318} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.686163] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 699.686435] env[65503]: INFO nova.compute.manager [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Took 10.11 seconds to spawn the instance on the hypervisor. [ 699.688785] env[65503]: DEBUG nova.compute.manager [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 699.688785] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16d4c0d-f638-4d29-910f-da8cbe9c16af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.850884] env[65503]: DEBUG nova.compute.manager [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 699.851148] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 699.852124] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f6a161-3277-47d3-9af4-925b92e71766 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.860974] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 699.863340] env[65503]: DEBUG oslo_vmware.rw_handles [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a6e31b-f9b6-1285-0b0a-04368a26f7da/disk-0.vmdk. 
{{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 699.863695] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c46a75a0-1176-44be-a5a6-b2dca1b6e753 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.866488] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed8df8e-ac76-4e3d-808f-68b29b989df0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.877912] env[65503]: DEBUG oslo_vmware.rw_handles [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a6e31b-f9b6-1285-0b0a-04368a26f7da/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 699.878226] env[65503]: ERROR oslo_vmware.rw_handles [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a6e31b-f9b6-1285-0b0a-04368a26f7da/disk-0.vmdk due to incomplete transfer. [ 699.880768] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a45cf9c7-1398-4fb9-b6a9-cfea90624497 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.882795] env[65503]: DEBUG oslo_vmware.api [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for the task: (returnval){ [ 699.882795] env[65503]: value = "task-4449670" [ 699.882795] env[65503]: _type = "Task" [ 699.882795] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.894250] env[65503]: DEBUG oslo_vmware.api [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449670, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.895769] env[65503]: DEBUG oslo_vmware.rw_handles [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a6e31b-f9b6-1285-0b0a-04368a26f7da/disk-0.vmdk. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 699.895989] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Uploaded image 4f4c9f00-a608-40ff-97e6-7644f89694ed to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 699.897621] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 699.902454] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e495ceff-b454-4ae2-87cd-488861d8130a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.908270] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 699.908270] env[65503]: value = "task-4449671" [ 699.908270] env[65503]: _type = "Task" [ 699.908270] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.921572] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449671, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.934131] env[65503]: DEBUG nova.network.neutron [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 700.037247] env[65503]: DEBUG nova.scheduler.client.report [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 700.212097] env[65503]: INFO nova.compute.manager [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Took 37.94 seconds to build instance. 
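The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task and Rename_Task records above all follow the same oslo.vmware pattern: invoke a vSphere *_Task method, then block in wait_for_task while the library polls the task object, which is what produces the repeated "progress is N%" DEBUG lines. A condensed sketch of that call pattern follows; the connection values are placeholders, and the snippet uses oslo.vmware's public session API rather than reproducing Nova's driver code:

# Sketch of the oslo.vmware task pattern behind the *_Task lines above.
# Host and credentials are placeholders, not taken from this log.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org',            # placeholder vCenter host
    'administrator@vsphere.local',    # placeholder username
    'secret',                         # placeholder password
    10,                               # api_retry_count
    1.0)                              # task_poll_interval, in seconds

vm_ref = ...  # assumed to be looked up elsewhere, e.g. by instance UUID

# invoke_api() starts the asynchronous vSphere task ...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
# ... and wait_for_task() polls it until success or error, producing the
# "Task: {...} progress is N%" and "completed successfully" records seen here.
task_info = session.wait_for_task(task)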
[ 700.394563] env[65503]: DEBUG oslo_vmware.api [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449670, 'name': PowerOffVM_Task, 'duration_secs': 0.331386} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.395806] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 700.395806] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 700.395806] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b992548-b90b-4201-bbca-de9f5dfbac27 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.420838] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449671, 'name': Destroy_Task, 'duration_secs': 0.368986} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.420838] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Destroyed the VM [ 700.421535] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 700.421535] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-26bf3705-f2e6-4fea-9e58-ee4921a71f01 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.430038] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 700.430038] env[65503]: value = "task-4449673" [ 700.430038] env[65503]: _type = "Task" [ 700.430038] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.441953] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449673, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.472753] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 700.473101] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 700.473300] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Deleting the datastore file [datastore2] 16d508f9-72f0-4853-92fb-c8c7a37b5668 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 700.473663] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0442969a-a660-41cf-91ce-75c5835cdaed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.481754] env[65503]: DEBUG oslo_vmware.api [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for the task: (returnval){ [ 700.481754] env[65503]: value = "task-4449674" [ 700.481754] env[65503]: _type = "Task" [ 700.481754] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.491659] env[65503]: DEBUG oslo_vmware.api [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449674, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.545636] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.781s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.546054] env[65503]: DEBUG nova.compute.manager [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 700.550446] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.770s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.552101] env[65503]: INFO nova.compute.claims [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.714962] env[65503]: DEBUG oslo_concurrency.lockutils [None req-292d86b0-023d-4894-98d7-1a3437a5bd8b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.453s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.943345] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449673, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.003344] env[65503]: DEBUG oslo_vmware.api [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Task: {'id': task-4449674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.312964} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.004739] env[65503]: WARNING openstack [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 701.005296] env[65503]: WARNING openstack [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 701.012036] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 701.012238] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 701.012553] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 701.012553] env[65503]: INFO nova.compute.manager [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Took 1.16 seconds to destroy the instance on the hypervisor. [ 701.012857] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 701.013304] env[65503]: DEBUG nova.compute.manager [-] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 701.013698] env[65503]: DEBUG nova.network.neutron [-] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 701.015039] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 701.015039] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 701.015039] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 701.060019] env[65503]: DEBUG nova.compute.utils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 701.061977] env[65503]: DEBUG nova.compute.manager [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 701.062374] env[65503]: DEBUG nova.network.neutron [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 701.062813] env[65503]: WARNING neutronclient.v2_0.client [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 701.063447] env[65503]: WARNING neutronclient.v2_0.client [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
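The lock bookkeeping lines in this section ("Acquiring lock ...", "Lock ... acquired ... waited Ns", "... released ... held Ns", for instance around the "compute_resources" lock taken by ResourceTracker.instance_claim and the per-instance "refresh_cache-..." locks) are emitted by oslo.concurrency's lockutils helpers. A minimal sketch of the two forms in play, with empty bodies and lock names copied from this log purely as examples:

# Sketch: the lockutils usage that produces the Acquiring/acquired/released
# DEBUG lines in this section.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_example():
    # Runs with the named lock held; lockutils logs how long the caller
    # waited for the lock and how long it was held.
    pass

# Context-manager form, as used around the network info cache refresh:
with lockutils.lock('refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654'):
    pass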
[ 701.064288] env[65503]: WARNING openstack [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 701.064803] env[65503]: WARNING openstack [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 701.076922] env[65503]: DEBUG nova.compute.manager [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Received event network-changed-f191aa02-3240-4647-9358-ee80ef3eb29d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 701.079893] env[65503]: DEBUG nova.compute.manager [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Refreshing instance network info cache due to event network-changed-f191aa02-3240-4647-9358-ee80ef3eb29d. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 701.079893] env[65503]: DEBUG oslo_concurrency.lockutils [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] Acquiring lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.079893] env[65503]: DEBUG oslo_concurrency.lockutils [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] Acquired lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.079893] env[65503]: DEBUG nova.network.neutron [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Refreshing network info cache for port f191aa02-3240-4647-9358-ee80ef3eb29d {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 701.299190] env[65503]: DEBUG nova.network.neutron [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Successfully created port: 26c792db-2c05-4dd4-8223-013b2d5d3f9f {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 701.384426] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
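
The "Acquiring lock \"refresh_cache-a197b590-...\" / Acquired lock ..." pairs above (and the later "Releasing lock") are oslo.concurrency in-process locks; lockutils itself emits the DEBUG lines with the waited/held timings. A sketch of the pattern, with the lock name copied from the log and a placeholder body rather than Nova's cache-refresh code:

    from oslo_concurrency import lockutils

    def refresh_network_info_cache(instance_uuid):
        # Serializes cache refreshes per instance, mirroring the
        # "refresh_cache-<uuid>" lock names seen in the log.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # placeholder: rebuild and store the network info cache here

    refresh_network_info_cache('a197b590-1f74-4241-9579-2f2d3bb89a1d')
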
[ 701.422557] env[65503]: DEBUG nova.policy [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ddb7dc8858e4aa09c61dc232cb465eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5de0ae091db74426975a523e945110fa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 701.442795] env[65503]: DEBUG oslo_vmware.api [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449673, 'name': RemoveSnapshot_Task, 'duration_secs': 0.573797} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.443065] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 701.443319] env[65503]: INFO nova.compute.manager [None req-dcdbc412-8f1e-4926-af86-c2c4d226d392 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Took 15.74 seconds to snapshot the instance on the hypervisor. [ 701.577377] env[65503]: DEBUG nova.compute.manager [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 701.586358] env[65503]: WARNING neutronclient.v2_0.client [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
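
The "Policy check for network:attach_external_network failed with credentials ..." DEBUG record above is an authorization check that legitimately fails for a plain member token, since attaching external networks defaults to admin-only. A rough, self-contained oslo.policy sketch of that kind of check; the default string and the trimmed credential dict are simplifying assumptions, not Nova's actual policy definitions:

    from oslo_config import cfg
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf(args=[])

    enforcer = policy.Enforcer(conf)
    # Assumed admin-only default for illustration; Nova registers its own rules.
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'],
             'project_id': '5de0ae091db74426975a523e945110fa'}
    # Returns False for member credentials, matching the DEBUG line above.
    print(enforcer.enforce('network:attach_external_network', {}, creds))
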
[ 701.587018] env[65503]: WARNING openstack [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 701.587389] env[65503]: WARNING openstack [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 701.709889] env[65503]: DEBUG nova.compute.manager [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 701.710460] env[65503]: DEBUG nova.virt.hardware [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 701.710731] env[65503]: DEBUG nova.virt.hardware [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 701.710829] env[65503]: DEBUG nova.virt.hardware [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 701.710996] env[65503]: DEBUG nova.virt.hardware [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 701.711197] env[65503]: DEBUG nova.virt.hardware [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 701.711352] env[65503]: DEBUG nova.virt.hardware [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 701.711549] env[65503]: DEBUG nova.virt.hardware [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 701.711708] env[65503]: DEBUG nova.virt.hardware [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 701.711849] env[65503]: DEBUG nova.virt.hardware [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 701.711996] env[65503]: DEBUG nova.virt.hardware [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 701.712171] env[65503]: DEBUG nova.virt.hardware [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 701.720243] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0204019-7e69-4c12-a20a-489c42484836 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.731743] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43cecba5-72d0-417d-84c4-cd1eb133d7e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.119941] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd666d79-04f9-4ca5-8fb1-92ce3c5f08ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.129322] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd138bc6-43e9-4ebd-a8e9-a61a466d9651 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.134690] env[65503]: WARNING neutronclient.v2_0.client [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
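
The nova.virt.hardware block above narrows the flavor/image limits (0 meaning "unset", capped at 65536) down to the possible CPU topologies for 1 vCPU, which is just sockets=1, cores=1, threads=1. A simplified sketch of that enumeration; it only illustrates the idea and omits Nova's preference sorting and NUMA handling:

    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Enumerate sockets*cores*threads factorisations of the vCPU count."""
        topos = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        topos.append(Topology(s, c, t))
        return topos

    print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]
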
[ 702.135362] env[65503]: WARNING openstack [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 702.135737] env[65503]: WARNING openstack [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 702.177057] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc190dc-0a94-419d-9de9-69fffdc418b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.186705] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fea6ea-a021-4d26-aa99-9eb0f85d204d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.204778] env[65503]: DEBUG nova.compute.provider_tree [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.590592] env[65503]: DEBUG nova.compute.manager [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 702.625212] env[65503]: DEBUG nova.virt.hardware [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 702.625806] env[65503]: DEBUG nova.virt.hardware [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 702.625806] env[65503]: DEBUG nova.virt.hardware [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 702.625806] env[65503]: DEBUG nova.virt.hardware [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 702.625940] env[65503]: DEBUG nova.virt.hardware [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 702.626231] env[65503]: DEBUG nova.virt.hardware [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 702.626445] env[65503]: DEBUG nova.virt.hardware [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 702.626598] env[65503]: DEBUG nova.virt.hardware [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 702.626760] env[65503]: DEBUG nova.virt.hardware [None 
req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 702.626912] env[65503]: DEBUG nova.virt.hardware [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 702.628762] env[65503]: DEBUG nova.virt.hardware [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 702.628762] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b74d7bb-55a7-4165-89d0-1534e92017ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.637999] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f89888-b602-4ae4-abea-ad7e1ff83b31 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.711302] env[65503]: DEBUG nova.scheduler.client.report [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 702.746402] env[65503]: DEBUG nova.network.neutron [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Successfully created port: bd140c8d-63d1-4c8c-a14e-2f8ce80a648c {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 702.919397] env[65503]: DEBUG nova.network.neutron [-] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 703.138591] env[65503]: DEBUG nova.network.neutron [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Successfully updated port: 26c792db-2c05-4dd4-8223-013b2d5d3f9f {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 703.215520] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.216306] env[65503]: DEBUG nova.compute.manager [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 703.219734] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.438s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.222228] env[65503]: INFO nova.compute.claims [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.255018] env[65503]: DEBUG nova.network.neutron [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Updating instance_info_cache with network_info: [{"id": "1e1fc8a1-f7e8-49f4-b328-b7f029f59874", "address": "fa:16:3e:cf:06:d8", "network": {"id": "234e0d20-0522-4720-b75f-e1246236d495", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1433648354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8d373b14fc34ee69c50f9f7ce58c888", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e1fc8a1-f7", "ovs_interfaceid": "1e1fc8a1-f7e8-49f4-b328-b7f029f59874", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 703.343813] env[65503]: WARNING openstack [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 703.344223] env[65503]: WARNING openstack [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 
req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 703.422771] env[65503]: INFO nova.compute.manager [-] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Took 2.41 seconds to deallocate network for instance. [ 703.465511] env[65503]: WARNING neutronclient.v2_0.client [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 703.466184] env[65503]: WARNING openstack [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 703.466527] env[65503]: WARNING openstack [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 703.644029] env[65503]: DEBUG oslo_concurrency.lockutils [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Acquiring lock "refresh_cache-38e9a714-87f8-422c-9cc5-09b6aec76198" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.644029] env[65503]: DEBUG oslo_concurrency.lockutils [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Acquired lock "refresh_cache-38e9a714-87f8-422c-9cc5-09b6aec76198" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.644029] env[65503]: DEBUG nova.network.neutron [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 703.675802] env[65503]: DEBUG nova.network.neutron [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Updated VIF entry in instance network info cache for port f191aa02-3240-4647-9358-ee80ef3eb29d. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 703.676176] env[65503]: DEBUG nova.network.neutron [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Updating instance_info_cache with network_info: [{"id": "f191aa02-3240-4647-9358-ee80ef3eb29d", "address": "fa:16:3e:b7:65:3a", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf191aa02-32", "ovs_interfaceid": "f191aa02-3240-4647-9358-ee80ef3eb29d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 703.721673] env[65503]: DEBUG nova.compute.utils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 703.723245] env[65503]: DEBUG nova.compute.manager [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 703.723861] env[65503]: DEBUG nova.network.neutron [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 703.724240] env[65503]: WARNING neutronclient.v2_0.client [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 703.724700] env[65503]: WARNING neutronclient.v2_0.client [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
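
The instance_info_cache payloads logged above are lists of VIF dicts (port id, MAC, network.subnets[].ips[], floating_ips, OVS details). When reading these logs it can help to pull out just the addressing; a small sketch that walks the structure shown above, with field names and example values taken from the logged payload:

    def summarize_network_info(network_info):
        """Yield (port_id, mac, address, type) tuples from a cached network_info list."""
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    yield (vif['id'], vif['address'], ip['address'], ip['type'])
                    for fip in ip.get('floating_ips', []):
                        yield (vif['id'], vif['address'], fip['address'], fip['type'])

    # Example built from the cache update above (values copied from the log).
    cached = [{'id': 'f191aa02-3240-4647-9358-ee80ef3eb29d',
               'address': 'fa:16:3e:b7:65:3a',
               'network': {'subnets': [{'ips': [
                   {'address': '192.168.128.4', 'type': 'fixed',
                    'floating_ips': [{'address': '10.180.180.248',
                                      'type': 'floating'}]}]}]}}]
    for row in summarize_network_info(cached):
        print(row)
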
[ 703.728195] env[65503]: WARNING openstack [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 703.728195] env[65503]: WARNING openstack [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 703.738305] env[65503]: DEBUG nova.compute.manager [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 703.745614] env[65503]: DEBUG nova.compute.manager [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Received event network-vif-plugged-1e1fc8a1-f7e8-49f4-b328-b7f029f59874 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 703.745825] env[65503]: DEBUG oslo_concurrency.lockutils [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Acquiring lock "8f0bf665-b21b-42ed-816d-69dee2f40654-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.746049] env[65503]: DEBUG oslo_concurrency.lockutils [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Lock "8f0bf665-b21b-42ed-816d-69dee2f40654-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.746214] env[65503]: DEBUG oslo_concurrency.lockutils [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Lock "8f0bf665-b21b-42ed-816d-69dee2f40654-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.746371] env[65503]: DEBUG nova.compute.manager [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] No waiting events found dispatching network-vif-plugged-1e1fc8a1-f7e8-49f4-b328-b7f029f59874 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 703.746526] env[65503]: WARNING nova.compute.manager [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Received unexpected event 
network-vif-plugged-1e1fc8a1-f7e8-49f4-b328-b7f029f59874 for instance with vm_state building and task_state spawning. [ 703.746671] env[65503]: DEBUG nova.compute.manager [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Received event network-changed-1e1fc8a1-f7e8-49f4-b328-b7f029f59874 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 703.746810] env[65503]: DEBUG nova.compute.manager [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Refreshing instance network info cache due to event network-changed-1e1fc8a1-f7e8-49f4-b328-b7f029f59874. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 703.746964] env[65503]: DEBUG oslo_concurrency.lockutils [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Acquiring lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.760433] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Releasing lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.760637] env[65503]: DEBUG nova.compute.manager [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Instance network_info: |[{"id": "1e1fc8a1-f7e8-49f4-b328-b7f029f59874", "address": "fa:16:3e:cf:06:d8", "network": {"id": "234e0d20-0522-4720-b75f-e1246236d495", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1433648354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8d373b14fc34ee69c50f9f7ce58c888", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e1fc8a1-f7", "ovs_interfaceid": "1e1fc8a1-f7e8-49f4-b328-b7f029f59874", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 703.761290] env[65503]: DEBUG oslo_concurrency.lockutils [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Acquired lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.761462] env[65503]: DEBUG 
nova.network.neutron [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Refreshing network info cache for port 1e1fc8a1-f7e8-49f4-b328-b7f029f59874 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 703.763186] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:06:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '750b5f9b-f78a-4650-9153-c5bb117e507c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e1fc8a1-f7e8-49f4-b328-b7f029f59874', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 703.771988] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Creating folder: Project (a8d373b14fc34ee69c50f9f7ce58c888). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 703.772714] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb0052f1-23f2-4f90-8858-0a92a9abf7f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.786136] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Created folder: Project (a8d373b14fc34ee69c50f9f7ce58c888) in parent group-v870190. [ 703.786355] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Creating folder: Instances. Parent ref: group-v870286. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 703.786618] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a6f06b4-0a72-447c-92ee-24f3019232e7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.792088] env[65503]: DEBUG nova.policy [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b272850d169441a9805fc08a13f1592', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2bb13f406914efb8b6ae36d052c8f21', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 703.805804] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Created folder: Instances in parent group-v870286. [ 703.805804] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 703.805988] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 703.806712] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8949005-cdbf-41b6-a68f-17e1608dbfc8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.828285] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 703.828285] env[65503]: value = "task-4449677" [ 703.828285] env[65503]: _type = "Task" [ 703.828285] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.838271] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449677, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.935760] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.145188] env[65503]: WARNING openstack [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 704.145643] env[65503]: WARNING openstack [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 704.164545] env[65503]: DEBUG nova.network.neutron [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Successfully created port: 639772e3-599d-4f7e-81ad-21f2c2f49bbe {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 704.182716] env[65503]: DEBUG oslo_concurrency.lockutils [req-6ac53b6e-ca60-481b-b3de-55513618c8f3 req-085b27c3-0deb-49e4-aa49-4523be546c52 service nova] Releasing lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.239492] env[65503]: DEBUG nova.network.neutron [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 704.272788] env[65503]: WARNING neutronclient.v2_0.client [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
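
The "Waiting for the task: (returnval){ value = \"task-4449677\" ... }" and "_poll_task ... progress is 0%" records above (and the "completed successfully" line that follows) are oslo.vmware polling a vCenter task until it finishes. A sketch of that call pattern; the endpoint, credentials and managed object reference are placeholders, and PowerOffVM_Task merely stands in for any task-returning vSphere call such as the CreateVM_Task seen here:

    from oslo_vmware import api, vim_util

    # Placeholder credentials/endpoint -- not the vCenter from this log.
    session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')  # hypothetical moref
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)   # emits the same _poll_task DEBUG lines as above
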
[ 704.273541] env[65503]: WARNING openstack [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 704.275029] env[65503]: WARNING openstack [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 704.341891] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449677, 'name': CreateVM_Task, 'duration_secs': 0.333383} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.342179] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 704.343417] env[65503]: WARNING neutronclient.v2_0.client [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 704.343417] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.343417] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.343714] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 704.346584] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98f90ca8-67ed-461a-93a7-0f8e921172ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.355318] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for the task: (returnval){ [ 
704.355318] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5279406f-5beb-6afc-b495-74f7050c1b18" [ 704.355318] env[65503]: _type = "Task" [ 704.355318] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.371292] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5279406f-5beb-6afc-b495-74f7050c1b18, 'name': SearchDatastore_Task, 'duration_secs': 0.010389} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.371683] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.371918] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 704.372259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.372419] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.372672] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 704.373031] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08b04bb3-02e3-4121-a6f3-1805b6b2d744 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.383393] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 704.383578] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 704.384334] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a531a932-275d-4cf1-a4d4-2c00bed95098 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.392641] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for the task: (returnval){ [ 704.392641] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524c7546-cb05-c50e-13c5-266ee09d7325" [ 704.392641] env[65503]: _type = "Task" [ 704.392641] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.402238] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524c7546-cb05-c50e-13c5-266ee09d7325, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.427253] env[65503]: DEBUG nova.network.neutron [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Successfully updated port: bd140c8d-63d1-4c8c-a14e-2f8ce80a648c {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 704.445182] env[65503]: WARNING openstack [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 704.445578] env[65503]: WARNING openstack [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 704.656859] env[65503]: WARNING neutronclient.v2_0.client [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
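
The image-cache handling above works in terms of datastore path strings such as "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk". oslo.vmware ships a small helper object for building and parsing such paths; a sketch using the image id from the log (this illustrates only the path format, not the driver's cache logic, and I am assuming the helper's property names):

    from oslo_vmware.objects.datastore import DatastorePath

    image_id = 'd68ffece-ab91-4610-b535-fa1fb25ade93'
    cache_vmdk = DatastorePath('datastore1', 'devstack-image-cache_base',
                               image_id, '%s.vmdk' % image_id)
    print(str(cache_vmdk))
    # "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk"

    parsed = DatastorePath.parse(str(cache_vmdk))
    print(parsed.datastore, parsed.rel_path)
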
[ 704.657569] env[65503]: WARNING openstack [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 704.657927] env[65503]: WARNING openstack [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 704.671628] env[65503]: WARNING openstack [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 704.672015] env[65503]: WARNING openstack [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 704.745021] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2f10d4-2c5c-4749-8ebf-e96465b9194d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.756567] env[65503]: DEBUG nova.compute.manager [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 704.759181] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ada440d-7614-41e8-81eb-d67868e70020 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.797542] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45d6ee7-e0c6-4959-ae9a-8e229b09450c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.808689] env[65503]: DEBUG nova.virt.hardware [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 704.808916] env[65503]: DEBUG nova.virt.hardware [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 704.809116] env[65503]: DEBUG nova.virt.hardware [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 704.809312] env[65503]: DEBUG nova.virt.hardware [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 704.809453] env[65503]: DEBUG nova.virt.hardware [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 704.809591] env[65503]: DEBUG nova.virt.hardware [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 704.809790] env[65503]: DEBUG nova.virt.hardware [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 
tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.809936] env[65503]: DEBUG nova.virt.hardware [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 704.810109] env[65503]: DEBUG nova.virt.hardware [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 704.810268] env[65503]: DEBUG nova.virt.hardware [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 704.810436] env[65503]: DEBUG nova.virt.hardware [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 704.811771] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97975694-633a-4d50-957f-df2faaecffc5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.816459] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23197a28-316f-4a5a-992d-5eec0cfb5acb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.832595] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc93a3c2-3c75-47af-b49c-a4645c264ea7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.836894] env[65503]: DEBUG nova.compute.provider_tree [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.904843] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524c7546-cb05-c50e-13c5-266ee09d7325, 'name': SearchDatastore_Task, 'duration_secs': 0.009703} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.905762] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2566930c-869e-419a-8f31-eadbae81e939 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.911954] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for the task: (returnval){ [ 704.911954] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5286e0bc-8387-b9c8-e11d-ceb504233c28" [ 704.911954] env[65503]: _type = "Task" [ 704.911954] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.921519] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5286e0bc-8387-b9c8-e11d-ceb504233c28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.930333] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "refresh_cache-606b8e9f-67c0-4d5c-85ab-ca35f8b31977" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.930519] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "refresh_cache-606b8e9f-67c0-4d5c-85ab-ca35f8b31977" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.930794] env[65503]: DEBUG nova.network.neutron [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 705.190256] env[65503]: DEBUG nova.network.neutron [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Updating instance_info_cache with network_info: [{"id": "26c792db-2c05-4dd4-8223-013b2d5d3f9f", "address": "fa:16:3e:74:d8:98", "network": {"id": "5551750c-9fbd-4b2e-a507-602c3bec1c4e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1477558748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb3b7254cf72404d805209ff11130a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26c792db-2c", "ovs_interfaceid": "26c792db-2c05-4dd4-8223-013b2d5d3f9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 705.217885] env[65503]: WARNING neutronclient.v2_0.client [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 705.219806] env[65503]: WARNING openstack [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 705.219806] env[65503]: WARNING openstack [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 705.341115] env[65503]: DEBUG nova.scheduler.client.report [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 705.408025] env[65503]: DEBUG nova.network.neutron [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Updated VIF entry in instance network info cache for port 1e1fc8a1-f7e8-49f4-b328-b7f029f59874. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 705.408025] env[65503]: DEBUG nova.network.neutron [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Updating instance_info_cache with network_info: [{"id": "1e1fc8a1-f7e8-49f4-b328-b7f029f59874", "address": "fa:16:3e:cf:06:d8", "network": {"id": "234e0d20-0522-4720-b75f-e1246236d495", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1433648354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8d373b14fc34ee69c50f9f7ce58c888", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e1fc8a1-f7", "ovs_interfaceid": "1e1fc8a1-f7e8-49f4-b328-b7f029f59874", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 705.427998] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5286e0bc-8387-b9c8-e11d-ceb504233c28, 'name': SearchDatastore_Task, 'duration_secs': 0.010315} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.427998] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.427998] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 8f0bf665-b21b-42ed-816d-69dee2f40654/8f0bf665-b21b-42ed-816d-69dee2f40654.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 705.428314] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30f288cd-59ea-4a46-a1c3-2f2729cf4f52 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.433732] env[65503]: WARNING openstack [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 705.434140] env[65503]: WARNING openstack [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 705.443407] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for the task: (returnval){ [ 705.443407] env[65503]: value = "task-4449678" [ 705.443407] env[65503]: _type = "Task" [ 705.443407] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.454786] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449678, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.497261] env[65503]: DEBUG nova.network.neutron [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 705.614944] env[65503]: WARNING openstack [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 705.614944] env[65503]: WARNING openstack [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 705.693830] env[65503]: DEBUG oslo_concurrency.lockutils [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Releasing lock "refresh_cache-38e9a714-87f8-422c-9cc5-09b6aec76198" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.694154] env[65503]: DEBUG nova.compute.manager [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Instance network_info: |[{"id": "26c792db-2c05-4dd4-8223-013b2d5d3f9f", "address": "fa:16:3e:74:d8:98", "network": {"id": "5551750c-9fbd-4b2e-a507-602c3bec1c4e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1477558748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb3b7254cf72404d805209ff11130a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26c792db-2c", "ovs_interfaceid": "26c792db-2c05-4dd4-8223-013b2d5d3f9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 705.694613] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:d8:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '462a7219-4deb-4225-9cf7-3131ef280363', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '26c792db-2c05-4dd4-8223-013b2d5d3f9f', 'vif_model': 'vmxnet3'}] {{(pid=65503) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 705.706132] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Creating folder: Project (cb3b7254cf72404d805209ff11130a1e). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 705.716023] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1817321b-73a9-4a87-94a0-8af18cc7add8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.736249] env[65503]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 705.736249] env[65503]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=65503) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 705.736249] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Folder already exists: Project (cb3b7254cf72404d805209ff11130a1e). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 705.736249] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Creating folder: Instances. Parent ref: group-v870246. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 705.736249] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-464be6c0-c136-40a6-8515-d8da41e519dd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.757193] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Created folder: Instances in parent group-v870246. [ 705.757193] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 705.757400] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 705.757656] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d650bca-d998-467c-b06e-b9632045d4a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.783388] env[65503]: WARNING neutronclient.v2_0.client [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
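The DuplicateName fault reported at 705.736249 is treated as "Folder already exists" and the build simply continues, i.e. folder creation is idempotent. A simplified sketch of that pattern, assuming the DuplicateName exception class exposed by oslo.vmware's fault mapping and placeholder session/parent_ref arguments (this is not Nova's actual vm_util code):

# Idempotent folder creation: a DuplicateName fault from Folder.CreateFolder
# means another request created the folder first, so it is treated as success.
from oslo_vmware import exceptions as vexc

def create_folder_if_missing(session, parent_ref, name):
    try:
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_ref, name=name)
    except vexc.DuplicateName:
        # "Folder already exists" in the log; the caller would look the
        # existing folder up by name under parent_ref and reuse it.
        return None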
[ 705.784815] env[65503]: WARNING openstack [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 705.784815] env[65503]: WARNING openstack [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 705.801421] env[65503]: DEBUG nova.network.neutron [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Successfully updated port: 639772e3-599d-4f7e-81ad-21f2c2f49bbe {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 705.806620] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 705.806620] env[65503]: value = "task-4449681" [ 705.806620] env[65503]: _type = "Task" [ 705.806620] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.819407] env[65503]: DEBUG nova.compute.manager [req-024ef405-59d4-4e04-850f-964b66219715 req-35a02300-995a-453b-85e8-3dc5dc33f174 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Received event network-vif-plugged-26c792db-2c05-4dd4-8223-013b2d5d3f9f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 705.819407] env[65503]: DEBUG oslo_concurrency.lockutils [req-024ef405-59d4-4e04-850f-964b66219715 req-35a02300-995a-453b-85e8-3dc5dc33f174 service nova] Acquiring lock "38e9a714-87f8-422c-9cc5-09b6aec76198-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.819407] env[65503]: DEBUG oslo_concurrency.lockutils [req-024ef405-59d4-4e04-850f-964b66219715 req-35a02300-995a-453b-85e8-3dc5dc33f174 service nova] Lock "38e9a714-87f8-422c-9cc5-09b6aec76198-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.819407] env[65503]: DEBUG oslo_concurrency.lockutils [req-024ef405-59d4-4e04-850f-964b66219715 req-35a02300-995a-453b-85e8-3dc5dc33f174 service nova] Lock "38e9a714-87f8-422c-9cc5-09b6aec76198-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.819407] env[65503]: DEBUG nova.compute.manager [req-024ef405-59d4-4e04-850f-964b66219715 req-35a02300-995a-453b-85e8-3dc5dc33f174 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] No waiting events found dispatching network-vif-plugged-26c792db-2c05-4dd4-8223-013b2d5d3f9f {{(pid=65503) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:345}} [ 705.819407] env[65503]: WARNING nova.compute.manager [req-024ef405-59d4-4e04-850f-964b66219715 req-35a02300-995a-453b-85e8-3dc5dc33f174 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Received unexpected event network-vif-plugged-26c792db-2c05-4dd4-8223-013b2d5d3f9f for instance with vm_state building and task_state spawning. [ 705.823884] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449681, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.848251] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.628s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.850217] env[65503]: DEBUG nova.compute.manager [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 705.854506] env[65503]: DEBUG oslo_concurrency.lockutils [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.598s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.854821] env[65503]: DEBUG nova.objects.instance [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lazy-loading 'resources' on Instance uuid 24e054d7-7662-47ef-8f69-4738c5ff9548 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 705.909034] env[65503]: DEBUG oslo_concurrency.lockutils [req-6404e28a-1f18-404b-bce9-085b977f10c5 req-1dee60fb-8d90-476d-8383-abd85c4e76b1 service nova] Releasing lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.933203] env[65503]: DEBUG nova.network.neutron [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Updating instance_info_cache with network_info: [{"id": "bd140c8d-63d1-4c8c-a14e-2f8ce80a648c", "address": "fa:16:3e:bc:d7:29", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd140c8d-63", "ovs_interfaceid": "bd140c8d-63d1-4c8c-a14e-2f8ce80a648c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 705.957377] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449678, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497875} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.957377] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 8f0bf665-b21b-42ed-816d-69dee2f40654/8f0bf665-b21b-42ed-816d-69dee2f40654.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 705.957377] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 705.957591] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef945f7d-52dc-45b8-b827-b50ff43b5593 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.969686] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for the task: (returnval){ [ 705.969686] env[65503]: value = "task-4449682" [ 705.969686] env[65503]: _type = "Task" [ 705.969686] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.987955] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449682, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.305226] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquiring lock "refresh_cache-34008711-b51b-467b-b972-bfda1023d696" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.305501] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquired lock "refresh_cache-34008711-b51b-467b-b972-bfda1023d696" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.305744] env[65503]: DEBUG nova.network.neutron [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 706.323753] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449681, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.356853] env[65503]: DEBUG nova.compute.utils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 706.359020] env[65503]: DEBUG nova.compute.manager [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 706.359344] env[65503]: DEBUG nova.network.neutron [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 706.359673] env[65503]: WARNING neutronclient.v2_0.client [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 706.359955] env[65503]: WARNING neutronclient.v2_0.client [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
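The extend target of 1048576 in the ExtendVirtualDisk_Task entries for instance 8f0bf665 appears to be a 1 GiB root disk expressed in KiB, the unit used for the extend call here. A quick check of that conversion (root_gb value assumed for illustration):

# 1 GiB root disk expressed in KiB, matching the
# "Extending root virtual disk to 1048576" entry above.
root_gb = 1
size_kb = root_gb * 1024 * 1024
assert size_kb == 1048576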
[ 706.360569] env[65503]: WARNING openstack [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 706.360907] env[65503]: WARNING openstack [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 706.433216] env[65503]: DEBUG nova.policy [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3452201659f649318555efae6850316c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebf44cd2ee0e4906bcdc3d16dfe7c838', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 706.439310] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "refresh_cache-606b8e9f-67c0-4d5c-85ab-ca35f8b31977" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.439659] env[65503]: DEBUG nova.compute.manager [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Instance network_info: |[{"id": "bd140c8d-63d1-4c8c-a14e-2f8ce80a648c", "address": "fa:16:3e:bc:d7:29", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd140c8d-63", "ovs_interfaceid": "bd140c8d-63d1-4c8c-a14e-2f8ce80a648c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 706.443810] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:d7:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaba65c3-6925-4c7f-83b6-17cd1a328e27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd140c8d-63d1-4c8c-a14e-2f8ce80a648c', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 706.451994] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 706.452684] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquiring lock "5e2cf383-312b-404f-acff-2ecb75678600" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.452684] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lock "5e2cf383-312b-404f-acff-2ecb75678600" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.453033] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquiring lock "5e2cf383-312b-404f-acff-2ecb75678600-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.453114] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lock "5e2cf383-312b-404f-acff-2ecb75678600-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.453344] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lock "5e2cf383-312b-404f-acff-2ecb75678600-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.458370] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 
606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 706.459246] env[65503]: INFO nova.compute.manager [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Terminating instance [ 706.461430] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cdc48e5e-704c-4fd4-9d1a-c6f50f3158c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.494108] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquiring lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.494220] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.499738] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 706.499738] env[65503]: value = "task-4449683" [ 706.499738] env[65503]: _type = "Task" [ 706.499738] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.503744] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449682, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07083} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.510369] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 706.512529] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84da97e-3324-466f-b1cd-b9f3b6a4b823 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.523337] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449683, 'name': CreateVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.546192] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 8f0bf665-b21b-42ed-816d-69dee2f40654/8f0bf665-b21b-42ed-816d-69dee2f40654.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 706.550369] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb165bab-1a31-4250-bac7-80e755c671f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.577025] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for the task: (returnval){ [ 706.577025] env[65503]: value = "task-4449684" [ 706.577025] env[65503]: _type = "Task" [ 706.577025] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.585914] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449684, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.815187] env[65503]: WARNING openstack [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 706.815632] env[65503]: WARNING openstack [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 706.825823] env[65503]: DEBUG oslo_concurrency.lockutils [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "9297d849-a966-48da-ba6a-453c42b99e44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.827145] env[65503]: DEBUG oslo_concurrency.lockutils [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "9297d849-a966-48da-ba6a-453c42b99e44" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.827145] env[65503]: DEBUG oslo_concurrency.lockutils [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "9297d849-a966-48da-ba6a-453c42b99e44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.827145] env[65503]: DEBUG oslo_concurrency.lockutils [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "9297d849-a966-48da-ba6a-453c42b99e44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.827145] env[65503]: DEBUG oslo_concurrency.lockutils [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "9297d849-a966-48da-ba6a-453c42b99e44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.831950] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449681, 'name': CreateVM_Task, 'duration_secs': 0.548529} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.832474] env[65503]: INFO nova.compute.manager [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Terminating instance [ 706.833857] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 706.834321] env[65503]: WARNING neutronclient.v2_0.client [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 706.834671] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'boot_index': 0, 'mount_device': '/dev/sda', 'device_type': None, 'attachment_id': '2025efe7-3e5c-4da2-9e8a-70f57815483c', 'delete_on_termination': True, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870259', 'volume_id': '760148f4-1011-4972-a0ed-b18b4fd3f079', 'name': 'volume-760148f4-1011-4972-a0ed-b18b4fd3f079', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '38e9a714-87f8-422c-9cc5-09b6aec76198', 'attached_at': '', 'detached_at': '', 'volume_id': '760148f4-1011-4972-a0ed-b18b4fd3f079', 'serial': '760148f4-1011-4972-a0ed-b18b4fd3f079'}, 'volume_type': None}], 'swap': None} {{(pid=65503) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 706.834865] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Root volume attach. Driver type: vmdk {{(pid=65503) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 706.836618] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c22c4b7-868a-43a5-a385-28900d90446d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.844681] env[65503]: DEBUG nova.network.neutron [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Successfully created port: 7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 706.851353] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a37f6ab-a447-42c2-842d-7672620664a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.859876] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ded9ad4-adf8-454b-968a-104af546d256 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.871550] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-6e15edb2-84ae-4016-b78d-0eedc4527666 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.875184] env[65503]: DEBUG nova.compute.manager [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 706.884397] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Waiting for the task: (returnval){ [ 706.884397] env[65503]: value = "task-4449685" [ 706.884397] env[65503]: _type = "Task" [ 706.884397] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.889424] env[65503]: DEBUG nova.network.neutron [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 706.898692] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449685, 'name': RelocateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.901897] env[65503]: DEBUG nova.compute.manager [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Received event network-vif-deleted-24cf9d48-62c6-4756-bdcc-5008383a037b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 706.901897] env[65503]: DEBUG nova.compute.manager [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Received event network-vif-plugged-bd140c8d-63d1-4c8c-a14e-2f8ce80a648c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 706.902074] env[65503]: DEBUG oslo_concurrency.lockutils [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Acquiring lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.902322] env[65503]: DEBUG oslo_concurrency.lockutils [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.902490] env[65503]: DEBUG oslo_concurrency.lockutils [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.902650] env[65503]: DEBUG nova.compute.manager [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] No waiting events found dispatching network-vif-plugged-bd140c8d-63d1-4c8c-a14e-2f8ce80a648c {{(pid=65503) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 706.902806] env[65503]: WARNING nova.compute.manager [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Received unexpected event network-vif-plugged-bd140c8d-63d1-4c8c-a14e-2f8ce80a648c for instance with vm_state building and task_state spawning. [ 706.902956] env[65503]: DEBUG nova.compute.manager [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Received event network-changed-bd140c8d-63d1-4c8c-a14e-2f8ce80a648c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 706.904013] env[65503]: DEBUG nova.compute.manager [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Refreshing instance network info cache due to event network-changed-bd140c8d-63d1-4c8c-a14e-2f8ce80a648c. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 706.904013] env[65503]: DEBUG oslo_concurrency.lockutils [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Acquiring lock "refresh_cache-606b8e9f-67c0-4d5c-85ab-ca35f8b31977" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.904013] env[65503]: DEBUG oslo_concurrency.lockutils [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Acquired lock "refresh_cache-606b8e9f-67c0-4d5c-85ab-ca35f8b31977" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.904013] env[65503]: DEBUG nova.network.neutron [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Refreshing network info cache for port bd140c8d-63d1-4c8c-a14e-2f8ce80a648c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 706.948109] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939a2541-d1ac-494a-9727-adf13b2fea9f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.957661] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac934f3e-5b56-412b-911f-bf5a3b10e89a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.995942] env[65503]: DEBUG nova.compute.manager [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 706.996278] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 706.997633] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a07ce52-8152-4138-9257-a2b67c7eb0e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.001647] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c7f109-f37c-41a3-8321-5f6d5601934d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.004926] env[65503]: DEBUG nova.compute.manager [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 707.014895] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 707.017864] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b46851b2-439f-4529-bf63-57410e9b5c1b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.022524] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be98279-32dc-466f-828b-4c2a988b7a9d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.031433] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449683, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.045581] env[65503]: DEBUG oslo_vmware.api [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 707.045581] env[65503]: value = "task-4449686" [ 707.045581] env[65503]: _type = "Task" [ 707.045581] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.045581] env[65503]: DEBUG nova.compute.provider_tree [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.056435] env[65503]: DEBUG oslo_vmware.api [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449686, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.091972] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449684, 'name': ReconfigVM_Task, 'duration_secs': 0.307364} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.092983] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 8f0bf665-b21b-42ed-816d-69dee2f40654/8f0bf665-b21b-42ed-816d-69dee2f40654.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 707.093671] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de9fa6da-8dc1-411f-9439-7013bad98feb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.103947] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for the task: (returnval){ [ 707.103947] env[65503]: value = "task-4449687" [ 707.103947] env[65503]: _type = "Task" [ 707.103947] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.116037] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449687, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.252509] env[65503]: WARNING openstack [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 707.252977] env[65503]: WARNING openstack [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 707.340555] env[65503]: DEBUG nova.compute.manager [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 707.341307] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 707.341882] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18376f8-49a2-4b77-9e96-f837e01359ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.351421] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 707.351732] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd7e7abb-ee34-4bc4-93bb-a185b06a3796 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.360718] env[65503]: DEBUG oslo_vmware.api [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 707.360718] env[65503]: value = "task-4449688" [ 707.360718] env[65503]: _type = "Task" [ 707.360718] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.370038] env[65503]: DEBUG oslo_vmware.api [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449688, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.371110] env[65503]: WARNING neutronclient.v2_0.client [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 707.371733] env[65503]: WARNING openstack [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 707.372129] env[65503]: WARNING openstack [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 707.397111] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449685, 'name': RelocateVM_Task, 'duration_secs': 0.422804} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.397544] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 707.397951] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870259', 'volume_id': '760148f4-1011-4972-a0ed-b18b4fd3f079', 'name': 'volume-760148f4-1011-4972-a0ed-b18b4fd3f079', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '38e9a714-87f8-422c-9cc5-09b6aec76198', 'attached_at': '', 'detached_at': '', 'volume_id': '760148f4-1011-4972-a0ed-b18b4fd3f079', 'serial': '760148f4-1011-4972-a0ed-b18b4fd3f079'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 707.398942] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8a4d9f-1b1d-4d3f-96fe-cff1c123ab4f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.416320] env[65503]: WARNING neutronclient.v2_0.client [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 707.417012] env[65503]: WARNING openstack [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 707.417365] env[65503]: WARNING openstack [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 707.425921] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee0fa06-741c-4e0a-96a8-6463fc3907ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.451879] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] volume-760148f4-1011-4972-a0ed-b18b4fd3f079/volume-760148f4-1011-4972-a0ed-b18b4fd3f079.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 707.452834] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15327c4d-ff91-4f20-aea2-daa542fd9749 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.473698] env[65503]: DEBUG oslo_vmware.api [None 
req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Waiting for the task: (returnval){ [ 707.473698] env[65503]: value = "task-4449689" [ 707.473698] env[65503]: _type = "Task" [ 707.473698] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.487744] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449689, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.518791] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449683, 'name': CreateVM_Task, 'duration_secs': 0.554181} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.521247] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 707.521996] env[65503]: WARNING neutronclient.v2_0.client [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 707.522479] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.522602] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.522944] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 707.523257] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-570d32b6-d0fc-4db7-8a64-4b97614350fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.529347] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 707.529347] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52523cdf-c3ba-8257-54aa-79e75bf4a994" [ 707.529347] env[65503]: _type = "Task" [ 707.529347] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.538927] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52523cdf-c3ba-8257-54aa-79e75bf4a994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.540078] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.551862] env[65503]: DEBUG nova.scheduler.client.report [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 707.562218] env[65503]: DEBUG oslo_vmware.api [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449686, 'name': PowerOffVM_Task, 'duration_secs': 0.272748} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.562640] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 707.562829] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 707.563150] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6907841c-be1a-4835-9416-f30dbde4175b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.615643] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449687, 'name': Rename_Task, 'duration_secs': 0.179244} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.616099] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 707.616377] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d6785a9-4b15-4e8e-882e-c09f144da581 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.620524] env[65503]: DEBUG nova.network.neutron [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Updating instance_info_cache with network_info: [{"id": "639772e3-599d-4f7e-81ad-21f2c2f49bbe", "address": "fa:16:3e:db:9d:60", "network": {"id": "8540897b-c867-4372-aa4a-150eda1ef1bb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-671031913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e2bb13f406914efb8b6ae36d052c8f21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap639772e3-59", "ovs_interfaceid": "639772e3-599d-4f7e-81ad-21f2c2f49bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 707.628671] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for the task: (returnval){ [ 707.628671] env[65503]: value = "task-4449691" [ 707.628671] env[65503]: _type = "Task" [ 707.628671] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.635565] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 707.635850] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 707.636060] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Deleting the datastore file [datastore2] 5e2cf383-312b-404f-acff-2ecb75678600 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 707.640019] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f608038-78e0-4204-adae-ffee7fc63442 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.642211] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449691, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.648508] env[65503]: DEBUG oslo_vmware.api [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for the task: (returnval){ [ 707.648508] env[65503]: value = "task-4449692" [ 707.648508] env[65503]: _type = "Task" [ 707.648508] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.659690] env[65503]: DEBUG oslo_vmware.api [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449692, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.760192] env[65503]: WARNING openstack [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 707.760599] env[65503]: WARNING openstack [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 707.871503] env[65503]: DEBUG oslo_vmware.api [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449688, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.876793] env[65503]: WARNING neutronclient.v2_0.client [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 707.879210] env[65503]: WARNING openstack [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 707.879210] env[65503]: WARNING openstack [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 707.887436] env[65503]: DEBUG nova.compute.manager [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 707.918357] env[65503]: DEBUG nova.virt.hardware [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 707.918609] env[65503]: DEBUG nova.virt.hardware [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 707.918732] env[65503]: DEBUG nova.virt.hardware [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 707.918898] env[65503]: DEBUG nova.virt.hardware [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 707.919066] env[65503]: DEBUG nova.virt.hardware [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 707.919202] env[65503]: DEBUG nova.virt.hardware [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 707.919406] env[65503]: DEBUG nova.virt.hardware [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 707.919575] env[65503]: DEBUG nova.virt.hardware [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 707.919733] env[65503]: DEBUG nova.virt.hardware [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 707.919889] env[65503]: DEBUG nova.virt.hardware [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 707.920071] env[65503]: DEBUG nova.virt.hardware [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 707.921379] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261720fc-4c7b-4897-941f-fe08d741c89b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.931256] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d5afce-ac5c-47f8-b770-9a5ebb7c0050 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.989355] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449689, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.041945] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52523cdf-c3ba-8257-54aa-79e75bf4a994, 'name': SearchDatastore_Task, 'duration_secs': 0.010623} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.042342] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.042627] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.042909] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.043077] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.043257] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.043536] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fdb6d194-b493-486d-bbbe-22433dd575d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.054500] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.054732] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 708.055566] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcd73daf-cd5a-4136-9f0e-18c534dd2687 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.059205] env[65503]: DEBUG oslo_concurrency.lockutils [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.205s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.064152] env[65503]: DEBUG nova.network.neutron [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Updated VIF entry in instance network info cache for port bd140c8d-63d1-4c8c-a14e-2f8ce80a648c. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 708.064532] env[65503]: DEBUG nova.network.neutron [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Updating instance_info_cache with network_info: [{"id": "bd140c8d-63d1-4c8c-a14e-2f8ce80a648c", "address": "fa:16:3e:bc:d7:29", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd140c8d-63", "ovs_interfaceid": "bd140c8d-63d1-4c8c-a14e-2f8ce80a648c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 708.067437] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.267s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.069092] env[65503]: INFO nova.compute.claims [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.072768] env[65503]: DEBUG oslo_vmware.api [None 
req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 708.072768] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5222f5dc-9c37-88e7-ceae-48544f1e6d5b" [ 708.072768] env[65503]: _type = "Task" [ 708.072768] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.083684] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5222f5dc-9c37-88e7-ceae-48544f1e6d5b, 'name': SearchDatastore_Task, 'duration_secs': 0.010805} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.084424] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24df11d8-ae27-4fc2-8ac8-9942df03484d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.092449] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 708.092449] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524d4216-f36a-449f-9960-b9d1b57926c8" [ 708.092449] env[65503]: _type = "Task" [ 708.092449] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.103738] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524d4216-f36a-449f-9960-b9d1b57926c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.108225] env[65503]: INFO nova.scheduler.client.report [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Deleted allocations for instance 24e054d7-7662-47ef-8f69-4738c5ff9548 [ 708.123745] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Releasing lock "refresh_cache-34008711-b51b-467b-b972-bfda1023d696" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.124132] env[65503]: DEBUG nova.compute.manager [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Instance network_info: |[{"id": "639772e3-599d-4f7e-81ad-21f2c2f49bbe", "address": "fa:16:3e:db:9d:60", "network": {"id": "8540897b-c867-4372-aa4a-150eda1ef1bb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-671031913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e2bb13f406914efb8b6ae36d052c8f21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap639772e3-59", "ovs_interfaceid": "639772e3-599d-4f7e-81ad-21f2c2f49bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 708.124593] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:9d:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd986680e-ad16-45b1-bf6d-cd2fe661679f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '639772e3-599d-4f7e-81ad-21f2c2f49bbe', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 708.132245] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Creating folder: Project (e2bb13f406914efb8b6ae36d052c8f21). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 708.133307] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9431145f-3d5b-4791-9ce5-eed9df6afb1f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.145663] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449691, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.149984] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Created folder: Project (e2bb13f406914efb8b6ae36d052c8f21) in parent group-v870190. [ 708.150202] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Creating folder: Instances. Parent ref: group-v870292. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 708.153673] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b88d4341-f49a-44fc-8035-5f8c0a3cb0b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.161434] env[65503]: DEBUG oslo_vmware.api [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Task: {'id': task-4449692, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147661} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.161747] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 708.161878] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 708.162071] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 708.162261] env[65503]: INFO nova.compute.manager [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Took 1.17 seconds to destroy the instance on the hypervisor. 
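The "Waiting for the task: (returnval){ ... }" objects and the "progress is N%" / "completed successfully" entries above are oslo.vmware's task-polling pattern. A minimal sketch of how that pattern is driven, assuming a placeholder vCenter host, credentials and VM moref (none of these values are taken from this log):

    from oslo_vmware import api, vim_util

    # Session setup; host and credentials are made-up placeholders.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=3, task_poll_interval=0.5)

    # invoke_api() issues the SOAP call and returns a task moref;
    # wait_for_task() then polls it, which is what produces the
    # "progress is N%" and "completed successfully" debug lines.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')  # hypothetical moref
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task_ref)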
[ 708.162561] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 708.163439] env[65503]: DEBUG nova.compute.manager [-] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 708.163552] env[65503]: DEBUG nova.network.neutron [-] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 708.163784] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 708.164322] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 708.164578] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 708.227431] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 708.373282] env[65503]: DEBUG oslo_vmware.api [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449688, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.376857] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Created folder: Instances in parent group-v870292. [ 708.377119] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 708.377352] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34008711-b51b-467b-b972-bfda1023d696] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 708.377596] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a25f844d-e2f6-4a5f-81e4-e2c4d6a3bb9c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.401593] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 708.401593] env[65503]: value = "task-4449695" [ 708.401593] env[65503]: _type = "Task" [ 708.401593] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.410066] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449695, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.425064] env[65503]: INFO nova.compute.manager [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Rebuilding instance [ 708.493621] env[65503]: DEBUG nova.compute.manager [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 708.495035] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a0e10d-1d51-431d-bc64-827f8855c7e3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.504973] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449689, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.576113] env[65503]: DEBUG oslo_concurrency.lockutils [req-1bd81a45-d2a4-4b49-bcaf-b7bd64809fb0 req-0893e49a-2bf0-4f0b-bdde-8f270a3097fd service nova] Releasing lock "refresh_cache-606b8e9f-67c0-4d5c-85ab-ca35f8b31977" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.608843] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524d4216-f36a-449f-9960-b9d1b57926c8, 'name': SearchDatastore_Task, 'duration_secs': 0.011416} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.609271] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.609519] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 606b8e9f-67c0-4d5c-85ab-ca35f8b31977/606b8e9f-67c0-4d5c-85ab-ca35f8b31977.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 708.609807] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ca15c6d-6033-44ec-ba40-516a4ce8a87b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.616813] env[65503]: DEBUG oslo_concurrency.lockutils [None req-561cde95-34a0-4f5e-93e6-d785e59c3da0 tempest-DeleteServersAdminTestJSON-1523437843 tempest-DeleteServersAdminTestJSON-1523437843-project-member] Lock "24e054d7-7662-47ef-8f69-4738c5ff9548" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.402s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.620371] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 708.620371] env[65503]: value = "task-4449696" [ 708.620371] env[65503]: _type = "Task" [ 708.620371] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.632692] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449696, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.646031] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449691, 'name': PowerOnVM_Task} progress is 91%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.663148] env[65503]: DEBUG nova.network.neutron [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Successfully updated port: 7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 708.874173] env[65503]: DEBUG oslo_vmware.api [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449688, 'name': PowerOffVM_Task, 'duration_secs': 1.118547} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.874609] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 708.874609] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 708.874955] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3698c88b-b0b9-4cb0-95cf-9c7ca673cf6a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.915797] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449695, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.972098] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 708.972550] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 708.972650] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Deleting the datastore file [datastore1] 9297d849-a966-48da-ba6a-453c42b99e44 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 708.972876] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5cb604d0-d673-4ad9-926a-3462db72f9e7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.981603] env[65503]: DEBUG oslo_vmware.api [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 708.981603] env[65503]: value = "task-4449698" [ 708.981603] env[65503]: _type = "Task" [ 708.981603] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.995135] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449689, 'name': ReconfigVM_Task, 'duration_secs': 1.325947} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.998871] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Reconfigured VM instance instance-00000020 to attach disk [datastore2] volume-760148f4-1011-4972-a0ed-b18b4fd3f079/volume-760148f4-1011-4972-a0ed-b18b4fd3f079.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 709.006088] env[65503]: DEBUG oslo_vmware.api [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449698, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.006534] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57ab320e-1fde-45cc-9829-675daa478ff8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.032077] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Waiting for the task: (returnval){ [ 709.032077] env[65503]: value = "task-4449699" [ 709.032077] env[65503]: _type = "Task" [ 709.032077] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.044581] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449699, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.050878] env[65503]: DEBUG nova.network.neutron [-] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 709.140630] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449696, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.156728] env[65503]: DEBUG oslo_vmware.api [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449691, 'name': PowerOnVM_Task, 'duration_secs': 1.119881} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.157176] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 709.157428] env[65503]: INFO nova.compute.manager [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Took 12.04 seconds to spawn the instance on the hypervisor. 
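The lock traffic in this section ('Acquiring lock "refresh_cache-..."' / "Releasing lock" from lockutils.py:313/334, and the "acquired by ... waited" / "released by ... held" lines from lockutils.py:405/424) comes from oslo.concurrency. A minimal sketch of the two usual forms, with placeholder lock names and bodies:

    from oslo_concurrency import lockutils

    # Context-manager form: logs the plain "Acquiring lock" / "Releasing lock"
    # debug lines.  The lock name is a placeholder, not one from this log.
    with lockutils.lock('refresh_cache-00000000-0000-0000-0000-000000000000'):
        pass  # critical section, e.g. rebuilding an instance network info cache

    # Decorator form: logs the "acquired by ... waited Ns" / "released by ...
    # held Ns" lines, naming the decorated callable (as with
    # do_terminate_instance earlier in this section).
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass

    update_usage()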
[ 709.157614] env[65503]: DEBUG nova.compute.manager [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 709.158529] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814413fc-e0ef-4563-9b91-02ae4a8f0a10 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.166272] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquiring lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.166502] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquired lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.166658] env[65503]: DEBUG nova.network.neutron [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 709.413284] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449695, 'name': CreateVM_Task, 'duration_secs': 0.677172} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.416909] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34008711-b51b-467b-b972-bfda1023d696] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 709.417759] env[65503]: WARNING neutronclient.v2_0.client [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
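The recurring warning "The python binding code in neutronclient is deprecated in favor of OpenstackSDK" refers to replacing python-neutronclient calls with openstacksdk. A rough sketch of the SDK equivalent of a port lookup, assuming credentials come from a clouds.yaml entry named 'devstack' (the cloud name is an assumption; the port id is the VIF id shown for instance 34008711-b51b-467b-b972-bfda1023d696 above, used purely for illustration):

    import openstack

    # Build a connection from clouds.yaml; 'devstack' is a placeholder name.
    conn = openstack.connect(cloud='devstack')

    # SDK equivalents of neutronclient's show_port() / list_ports().
    port = conn.network.get_port('639772e3-599d-4f7e-81ad-21f2c2f49bbe')
    for p in conn.network.ports(device_id=port.device_id):
        print(p.id, p.status)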
[ 709.418231] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.418406] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.418727] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 709.419015] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7832d89a-5229-4c5c-977b-2d737169763a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.425513] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for the task: (returnval){ [ 709.425513] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525101ec-88c3-e0a6-7886-185a71418351" [ 709.425513] env[65503]: _type = "Task" [ 709.425513] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.438285] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525101ec-88c3-e0a6-7886-185a71418351, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.494710] env[65503]: DEBUG oslo_vmware.api [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449698, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.442214} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.496850] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 709.499126] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 709.499126] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 709.499126] env[65503]: INFO nova.compute.manager [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Took 2.16 seconds to destroy the instance on the hypervisor. [ 709.499126] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 709.502697] env[65503]: DEBUG nova.compute.manager [-] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 709.502697] env[65503]: DEBUG nova.network.neutron [-] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 709.502697] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
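The "Waiting for function ... _deallocate_network_with_retries to return" entries (loopingcall.py:437) are emitted by oslo.service's RetryDecorator while it runs the wrapped function inside a looping call. A minimal sketch of that pattern, with made-up retry parameters and a placeholder exception type:

    from oslo_service import loopingcall

    class TransientError(Exception):
        """Placeholder for whatever transient failure is being retried."""

    # RetryDecorator re-invokes the wrapped function when it raises one of the
    # listed exceptions, sleeping between attempts; while the loop runs it logs
    # "Waiting for function <name> to return.", as seen above.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=10,
                                exceptions=(TransientError,))
    def deallocate_network():
        pass  # placeholder body; real code would talk to Neutron here

    deallocate_network()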
[ 709.502697] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 709.502697] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 709.508980] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "b6cda94b-2894-4cf0-8522-6593df9723bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.509252] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "b6cda94b-2894-4cf0-8522-6593df9723bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.523108] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 709.524045] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60e826f6-f8ce-4558-9064-0b8199ffbc80 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.535875] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 709.535875] env[65503]: value = "task-4449700" [ 709.535875] env[65503]: _type = "Task" [ 709.535875] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.544817] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449699, 'name': ReconfigVM_Task, 'duration_secs': 0.180719} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.548550] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870259', 'volume_id': '760148f4-1011-4972-a0ed-b18b4fd3f079', 'name': 'volume-760148f4-1011-4972-a0ed-b18b4fd3f079', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '38e9a714-87f8-422c-9cc5-09b6aec76198', 'attached_at': '', 'detached_at': '', 'volume_id': '760148f4-1011-4972-a0ed-b18b4fd3f079', 'serial': '760148f4-1011-4972-a0ed-b18b4fd3f079'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 709.549931] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2bacd274-6050-4815-abc5-f1c6ff34b4a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.555733] env[65503]: INFO nova.compute.manager [-] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Took 1.39 seconds to deallocate network for instance. [ 709.556039] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449700, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.557179] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 709.569462] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Waiting for the task: (returnval){ [ 709.569462] env[65503]: value = "task-4449701" [ 709.569462] env[65503]: _type = "Task" [ 709.569462] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.583820] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449701, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.639646] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449696, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626519} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.639646] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 606b8e9f-67c0-4d5c-85ab-ca35f8b31977/606b8e9f-67c0-4d5c-85ab-ca35f8b31977.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 709.639646] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 709.639646] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffced2aa-81b6-4df6-ba53-7bc50fc6efdf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.650042] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 709.650042] env[65503]: value = "task-4449702" [ 709.650042] env[65503]: _type = "Task" [ 709.650042] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.660853] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449702, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.674895] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d98cab6-1661-46d2-b63b-d30e160b1d63 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.679247] env[65503]: WARNING openstack [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 709.679488] env[65503]: WARNING openstack [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 709.703511] env[65503]: INFO nova.compute.manager [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Took 41.09 seconds to build instance. [ 709.706319] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b89dfb-f707-4b79-a805-60e2d503aeed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.755944] env[65503]: DEBUG nova.network.neutron [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 709.759590] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5cc9789-316f-43a0-99d9-2388c22efee7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.764247] env[65503]: DEBUG nova.compute.manager [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Received event network-changed-26c792db-2c05-4dd4-8223-013b2d5d3f9f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 709.764999] env[65503]: DEBUG nova.compute.manager [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Refreshing instance network info cache due to event network-changed-26c792db-2c05-4dd4-8223-013b2d5d3f9f. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 709.764999] env[65503]: DEBUG oslo_concurrency.lockutils [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Acquiring lock "refresh_cache-38e9a714-87f8-422c-9cc5-09b6aec76198" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.764999] env[65503]: DEBUG oslo_concurrency.lockutils [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Acquired lock "refresh_cache-38e9a714-87f8-422c-9cc5-09b6aec76198" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.765363] env[65503]: DEBUG nova.network.neutron [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Refreshing network info cache for port 26c792db-2c05-4dd4-8223-013b2d5d3f9f {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 709.777059] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f8c007-0bcb-4e30-9e4c-b4c5237df950 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.790134] env[65503]: WARNING openstack [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 709.790831] env[65503]: WARNING openstack [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 709.820486] env[65503]: DEBUG nova.compute.provider_tree [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.920092] env[65503]: WARNING neutronclient.v2_0.client [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
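The paired warnings "Disabling service 'block-storage' ... no such option valid_interfaces in group [cinder]" (and the same for barbican) suggest openstacksdk is probing the service's oslo.config groups for keystoneauth adapter options that were never registered there. A hedged sketch of how those options, including valid_interfaces, are normally registered so the lookup does not raise NoSuchOptError; the ConfigOpts instance and project name are placeholders:

    from keystoneauth1 import loading as ks_loading
    from oslo_config import cfg

    CONF = cfg.ConfigOpts()

    # register_adapter_conf_options() adds endpoint-selection options such as
    # valid_interfaces, region_name and endpoint_override to the given group;
    # register_session_conf_options() adds TLS/timeout session options.
    ks_loading.register_adapter_conf_options(CONF, 'cinder')
    ks_loading.register_session_conf_options(CONF, 'cinder')

    CONF([], project='example')          # parse an empty command line
    print(CONF.cinder.valid_interfaces)  # prints the configured value (unset by default)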
[ 709.920715] env[65503]: WARNING openstack [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 709.921211] env[65503]: WARNING openstack [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 709.939102] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525101ec-88c3-e0a6-7886-185a71418351, 'name': SearchDatastore_Task, 'duration_secs': 0.027382} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.939437] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.939655] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 709.939881] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.940030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.940211] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 709.940492] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-822d8784-b768-46d5-895c-ed73efd40b42 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.954311] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 709.954469] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 709.955265] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed5f7825-de74-47ce-9225-69bae5ef690b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.962745] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for the task: (returnval){ [ 709.962745] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525d5a64-7965-f757-cc73-4680dd742bbe" [ 709.962745] env[65503]: _type = "Task" [ 709.962745] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.971189] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525d5a64-7965-f757-cc73-4680dd742bbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.012043] env[65503]: DEBUG nova.compute.manager [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 710.048321] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449700, 'name': PowerOffVM_Task, 'duration_secs': 0.234814} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.048618] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 710.048838] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 710.049702] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be518133-0e17-4a7d-a558-dd643ba5da61 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.060504] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 710.060880] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8bb483f0-738e-4f33-ad61-a37fa8430a4e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.066701] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.084622] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449701, 'name': Rename_Task, 'duration_secs': 0.175165} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.085172] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 710.086030] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72018ce1-8e5f-4259-b84a-d7ff1d4c63ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.094345] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Waiting for the task: (returnval){ [ 710.094345] env[65503]: value = "task-4449704" [ 710.094345] env[65503]: _type = "Task" [ 710.094345] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.113686] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449704, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.127268] env[65503]: DEBUG nova.network.neutron [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updating instance_info_cache with network_info: [{"id": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "address": "fa:16:3e:4a:dc:36", "network": {"id": "d86429bf-deca-464e-a1ca-79eafee9ebd1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-267775540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf44cd2ee0e4906bcdc3d16dfe7c838", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcf2e2a-4e", "ovs_interfaceid": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 710.136233] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 710.136233] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 710.136412] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleting the datastore file [datastore2] 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 710.136664] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b2b9470-b945-4497-b9ad-42d6c9b608cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.146299] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da 
tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 710.146299] env[65503]: value = "task-4449705" [ 710.146299] env[65503]: _type = "Task" [ 710.146299] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.158859] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449705, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.164780] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449702, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087151} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.165569] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.166908] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe90cdc-910d-483e-902f-364895c9c729 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.192531] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] 606b8e9f-67c0-4d5c-85ab-ca35f8b31977/606b8e9f-67c0-4d5c-85ab-ca35f8b31977.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.192930] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85d270e4-6ed1-4b2a-af76-c08c486a42f8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.208369] env[65503]: DEBUG oslo_concurrency.lockutils [None req-90127e2b-a979-4ac8-9bbf-7cdb487731f8 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Lock "8f0bf665-b21b-42ed-816d-69dee2f40654" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.608s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.215343] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 710.215343] env[65503]: value = "task-4449706" [ 710.215343] env[65503]: _type = "Task" [ 710.215343] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.225117] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449706, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.268481] env[65503]: WARNING neutronclient.v2_0.client [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 710.269297] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 710.269681] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 710.324025] env[65503]: DEBUG nova.scheduler.client.report [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 710.357878] env[65503]: DEBUG nova.compute.manager [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Received event network-vif-plugged-7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 710.358345] env[65503]: DEBUG oslo_concurrency.lockutils [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Acquiring lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.358597] env[65503]: DEBUG oslo_concurrency.lockutils [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.358860] env[65503]: 
DEBUG oslo_concurrency.lockutils [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.359152] env[65503]: DEBUG nova.compute.manager [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] No waiting events found dispatching network-vif-plugged-7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 710.359385] env[65503]: WARNING nova.compute.manager [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Received unexpected event network-vif-plugged-7dcf2e2a-4e77-459c-9936-568c34f49a33 for instance with vm_state building and task_state spawning. [ 710.359558] env[65503]: DEBUG nova.compute.manager [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Received event network-changed-7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 710.359876] env[65503]: DEBUG nova.compute.manager [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Refreshing instance network info cache due to event network-changed-7dcf2e2a-4e77-459c-9936-568c34f49a33. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 710.360100] env[65503]: DEBUG oslo_concurrency.lockutils [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Acquiring lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.400191] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 710.400594] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 710.474372] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525d5a64-7965-f757-cc73-4680dd742bbe, 'name': SearchDatastore_Task, 'duration_secs': 0.018886} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.475711] env[65503]: WARNING neutronclient.v2_0.client [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 710.476355] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 710.476696] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 710.486557] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf874d7f-712d-4401-8dd3-0b78dc457234 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.495398] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for the task: (returnval){ [ 710.495398] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d6ee4d-5481-48f4-1b04-6c0582f16971" [ 710.495398] env[65503]: _type = "Task" [ 710.495398] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.504340] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d6ee4d-5481-48f4-1b04-6c0582f16971, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.544730] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.572988] env[65503]: DEBUG nova.network.neutron [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Updated VIF entry in instance network info cache for port 26c792db-2c05-4dd4-8223-013b2d5d3f9f. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 710.573985] env[65503]: DEBUG nova.network.neutron [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Updating instance_info_cache with network_info: [{"id": "26c792db-2c05-4dd4-8223-013b2d5d3f9f", "address": "fa:16:3e:74:d8:98", "network": {"id": "5551750c-9fbd-4b2e-a507-602c3bec1c4e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1477558748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb3b7254cf72404d805209ff11130a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26c792db-2c", "ovs_interfaceid": "26c792db-2c05-4dd4-8223-013b2d5d3f9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 710.609241] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449704, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.628361] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Releasing lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.628754] env[65503]: DEBUG nova.compute.manager [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Instance network_info: |[{"id": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "address": "fa:16:3e:4a:dc:36", "network": {"id": "d86429bf-deca-464e-a1ca-79eafee9ebd1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-267775540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf44cd2ee0e4906bcdc3d16dfe7c838", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcf2e2a-4e", "ovs_interfaceid": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 710.629494] env[65503]: DEBUG oslo_concurrency.lockutils [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Acquired lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.629722] env[65503]: DEBUG nova.network.neutron [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Refreshing network info cache for port 7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 710.631648] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:dc:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9ee6f9-33be-4f58-8248-694024ec31d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7dcf2e2a-4e77-459c-9936-568c34f49a33', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 710.643376] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 
tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Creating folder: Project (ebf44cd2ee0e4906bcdc3d16dfe7c838). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 710.644724] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26827cfe-97d2-430e-98bb-26633f178613 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.658844] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449705, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.345826} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.659271] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 710.659531] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 710.659816] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 710.664779] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Created folder: Project (ebf44cd2ee0e4906bcdc3d16dfe7c838) in parent group-v870190. [ 710.665119] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Creating folder: Instances. Parent ref: group-v870295. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 710.665503] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d64eac4-d86a-44c3-a160-96f64d3ff6cf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.683592] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Created folder: Instances in parent group-v870295. [ 710.683767] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 710.684040] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 710.684253] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e1f63b5-9ab4-4926-a3cd-93be73f8c15d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.708141] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 710.708141] env[65503]: value = "task-4449709" [ 710.708141] env[65503]: _type = "Task" [ 710.708141] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.720189] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449709, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.730731] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449706, 'name': ReconfigVM_Task, 'duration_secs': 0.480134} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.730868] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Reconfigured VM instance instance-00000021 to attach disk [datastore2] 606b8e9f-67c0-4d5c-85ab-ca35f8b31977/606b8e9f-67c0-4d5c-85ab-ca35f8b31977.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 710.731597] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a24ddc3d-2d7b-4db0-a847-e51f169edcb0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.739339] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 710.739339] env[65503]: value = "task-4449710" [ 710.739339] env[65503]: _type = "Task" [ 710.739339] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.743294] env[65503]: DEBUG nova.network.neutron [-] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 710.751862] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449710, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.829042] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.761s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.830380] env[65503]: DEBUG nova.compute.manager [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 710.833104] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 23.196s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.009022] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d6ee4d-5481-48f4-1b04-6c0582f16971, 'name': SearchDatastore_Task, 'duration_secs': 0.025849} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.009369] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.009639] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 34008711-b51b-467b-b972-bfda1023d696/34008711-b51b-467b-b972-bfda1023d696.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 711.009964] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-541f67a6-43fd-4bc3-913c-40b741a6470d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.019142] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for the task: (returnval){ [ 711.019142] env[65503]: value = "task-4449711" [ 711.019142] env[65503]: _type = "Task" [ 711.019142] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.031585] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449711, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.077315] env[65503]: DEBUG oslo_concurrency.lockutils [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Releasing lock "refresh_cache-38e9a714-87f8-422c-9cc5-09b6aec76198" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.077790] env[65503]: DEBUG nova.compute.manager [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 34008711-b51b-467b-b972-bfda1023d696] Received event network-vif-plugged-639772e3-599d-4f7e-81ad-21f2c2f49bbe {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 711.078129] env[65503]: DEBUG oslo_concurrency.lockutils [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Acquiring lock "34008711-b51b-467b-b972-bfda1023d696-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.078473] env[65503]: DEBUG oslo_concurrency.lockutils [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Lock "34008711-b51b-467b-b972-bfda1023d696-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.078742] env[65503]: DEBUG oslo_concurrency.lockutils [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Lock "34008711-b51b-467b-b972-bfda1023d696-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.079062] env[65503]: DEBUG nova.compute.manager [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 34008711-b51b-467b-b972-bfda1023d696] No waiting events found dispatching network-vif-plugged-639772e3-599d-4f7e-81ad-21f2c2f49bbe {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 711.079282] env[65503]: WARNING nova.compute.manager [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 34008711-b51b-467b-b972-bfda1023d696] Received unexpected event network-vif-plugged-639772e3-599d-4f7e-81ad-21f2c2f49bbe for instance with vm_state building and task_state spawning. 
[ 711.079456] env[65503]: DEBUG nova.compute.manager [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 34008711-b51b-467b-b972-bfda1023d696] Received event network-changed-639772e3-599d-4f7e-81ad-21f2c2f49bbe {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 711.079606] env[65503]: DEBUG nova.compute.manager [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 34008711-b51b-467b-b972-bfda1023d696] Refreshing instance network info cache due to event network-changed-639772e3-599d-4f7e-81ad-21f2c2f49bbe. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 711.079796] env[65503]: DEBUG oslo_concurrency.lockutils [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Acquiring lock "refresh_cache-34008711-b51b-467b-b972-bfda1023d696" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.080388] env[65503]: DEBUG oslo_concurrency.lockutils [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Acquired lock "refresh_cache-34008711-b51b-467b-b972-bfda1023d696" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.080388] env[65503]: DEBUG nova.network.neutron [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 34008711-b51b-467b-b972-bfda1023d696] Refreshing network info cache for port 639772e3-599d-4f7e-81ad-21f2c2f49bbe {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 711.108271] env[65503]: DEBUG oslo_vmware.api [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4449704, 'name': PowerOnVM_Task, 'duration_secs': 0.678033} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.108569] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 711.108772] env[65503]: INFO nova.compute.manager [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Took 9.40 seconds to spawn the instance on the hypervisor. 
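The PowerOnVM_Task records above follow the usual oslo.vmware pattern: invoke a vSphere task, then block in wait_for_task while the driver logs the periodic "progress is N%" lines until the task completes with a duration. A minimal sketch of that pattern is below; the endpoint, credentials, and vm_ref are placeholders for illustration only, not values from this deployment, and this is not the driver's actual code path.

    # Sketch: invoke a vSphere task via oslo.vmware and wait for it,
    # mirroring the PowerOnVM_Task / wait_for_task lines in the log.
    from oslo_vmware import api

    # Placeholder endpoint and credentials (assumptions for illustration).
    session = api.VMwareAPISession(
        host='vcenter.example.org',
        server_username='user',
        server_password='secret',
        api_retry_count=3,
        task_poll_interval=0.5)

    def power_on(vm_ref):
        # Returns a Task managed-object reference such as 'task-4449704'.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task (the "progress is N%" lines) and
        # raises on failure; on success it returns the completed task info.
        return session.wait_for_task(task)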
[ 711.108945] env[65503]: DEBUG nova.compute.manager [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 711.109820] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f06597-3262-4ade-b61f-3648d42af1b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.145626] env[65503]: WARNING neutronclient.v2_0.client [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 711.146445] env[65503]: WARNING openstack [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 711.147096] env[65503]: WARNING openstack [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 711.220309] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449709, 'name': CreateVM_Task, 'duration_secs': 0.485006} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.220504] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 711.221094] env[65503]: WARNING neutronclient.v2_0.client [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
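The recurring "python binding code in neutronclient is deprecated in favor of OpenstackSDK" warnings refer to calls like the port/network-info refreshes seen above. As a rough, hedged sketch (assuming authentication comes from clouds.yaml or OS_* environment variables, which is not shown in this log), the SDK equivalent of such a port lookup looks like this:

    # Sketch of the openstacksdk call the deprecation warning points at.
    import openstack

    # Uses clouds.yaml / OS_* env vars for auth (assumption for this sketch).
    conn = openstack.connect()

    # Fetch a port by id, e.g. the VIF id appearing in the cache updates above.
    port = conn.network.get_port('7dcf2e2a-4e77-459c-9936-568c34f49a33')
    print(port.mac_address, port.fixed_ips)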
[ 711.221432] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.221613] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.221939] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 711.222313] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8f83beb-7b6f-4096-b10f-b53e3a9b73ae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.230073] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 711.230073] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d6ef9e-f01e-ff43-8e4f-2ad2850f2bd9" [ 711.230073] env[65503]: _type = "Task" [ 711.230073] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.240577] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d6ef9e-f01e-ff43-8e4f-2ad2850f2bd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.245762] env[65503]: INFO nova.compute.manager [-] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Took 1.74 seconds to deallocate network for instance. [ 711.251320] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449710, 'name': Rename_Task, 'duration_secs': 0.265908} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.254368] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 711.254648] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0da3a3ed-63e4-400f-9e77-f66c60b86f8e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.264272] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 711.264272] env[65503]: value = "task-4449712" [ 711.264272] env[65503]: _type = "Task" [ 711.264272] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.275996] env[65503]: DEBUG nova.compute.manager [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 711.277689] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d2c6b6-ca83-466f-9356-0f2be4d2cdf9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.286109] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449712, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.309268] env[65503]: WARNING openstack [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 711.309733] env[65503]: WARNING openstack [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 711.336831] env[65503]: DEBUG nova.compute.utils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 711.338382] env[65503]: DEBUG nova.compute.manager [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 711.338572] env[65503]: DEBUG nova.network.neutron [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 711.338885] env[65503]: WARNING neutronclient.v2_0.client [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 711.339463] env[65503]: WARNING neutronclient.v2_0.client [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 711.342247] env[65503]: WARNING openstack [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 711.342700] env[65503]: WARNING openstack [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 711.414955] env[65503]: WARNING neutronclient.v2_0.client [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 711.415782] env[65503]: WARNING openstack [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 711.416224] env[65503]: WARNING openstack [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 711.427837] env[65503]: DEBUG nova.policy [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ecd3784a4a541c3907979c4dab0ac20', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c2254fd86b74662975d3ad1fa4b0f74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 711.531133] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449711, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.537255] env[65503]: DEBUG nova.network.neutron [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updated VIF entry in instance network info cache for port 7dcf2e2a-4e77-459c-9936-568c34f49a33. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 711.537255] env[65503]: DEBUG nova.network.neutron [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updating instance_info_cache with network_info: [{"id": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "address": "fa:16:3e:4a:dc:36", "network": {"id": "d86429bf-deca-464e-a1ca-79eafee9ebd1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-267775540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf44cd2ee0e4906bcdc3d16dfe7c838", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcf2e2a-4e", "ovs_interfaceid": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 711.585890] env[65503]: WARNING neutronclient.v2_0.client [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 711.585890] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 711.585890] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 711.630183] env[65503]: INFO nova.compute.manager [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Took 41.41 seconds to build instance. 
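The repeated "Disabling service 'block-storage' / 'key-manager'" warnings all come down to the same oslo.config behavior: openstacksdk reads valid_interfaces from the [cinder] and [barbican] groups, and because that option is not registered in this process's config object, oslo.config raises NoSuchOptError and the SDK disables the service. The snippet below reproduces just that mechanism with a throwaway ConfigOpts object; it is a self-contained illustration, not the code path openstacksdk actually runs.

    # Sketch of the failure mode behind the warnings: reading an option
    # that was never registered for a group raises NoSuchOptError.
    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))
    # 'valid_interfaces' is deliberately NOT registered, mirroring the log.
    conf([])  # initialize with no command line args / config files

    try:
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        print(exc)  # -> no such option valid_interfaces in group [cinder]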
[ 711.695223] env[65503]: DEBUG nova.virt.hardware [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 711.695528] env[65503]: DEBUG nova.virt.hardware [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 711.695890] env[65503]: DEBUG nova.virt.hardware [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 711.695890] env[65503]: DEBUG nova.virt.hardware [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 711.696172] env[65503]: DEBUG nova.virt.hardware [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 711.696278] env[65503]: DEBUG nova.virt.hardware [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 711.696657] env[65503]: DEBUG nova.virt.hardware [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 711.696657] env[65503]: DEBUG nova.virt.hardware [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 711.696836] env[65503]: DEBUG nova.virt.hardware [None req-ded877af-7b52-4629-98d4-8e550b0761da 
tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 711.697074] env[65503]: DEBUG nova.virt.hardware [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 711.698406] env[65503]: DEBUG nova.virt.hardware [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 711.698406] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ba30cd-7131-4dbe-9889-dd40eadc181b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.708167] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d858e4b7-65f8-4ac6-9139-e6845902b8cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.727502] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:29:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a0ecec2-8866-4131-aa0b-e63ba349190f', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 711.735814] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 711.736223] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 711.739559] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57f27dde-a55b-44a6-b2c6-a2446dc34dae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.757852] env[65503]: DEBUG oslo_concurrency.lockutils [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.761908] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d6ef9e-f01e-ff43-8e4f-2ad2850f2bd9, 'name': SearchDatastore_Task, 'duration_secs': 0.035525} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.763314] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.763555] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 711.763780] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.763923] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.764109] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 711.764359] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 711.764359] env[65503]: value = "task-4449713" [ 711.764359] env[65503]: _type = "Task" [ 711.764359] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.764536] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c245f870-7bf5-4ca2-b018-ada7abae5913 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.782108] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449713, 'name': CreateVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.783188] env[65503]: DEBUG nova.network.neutron [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Successfully created port: 0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 711.789828] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449712, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.789828] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 711.789828] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 711.790993] env[65503]: DEBUG oslo_concurrency.lockutils [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "8c274097-234a-44be-9159-c2fb0f1a8da1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.791619] env[65503]: DEBUG oslo_concurrency.lockutils [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "8c274097-234a-44be-9159-c2fb0f1a8da1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.791619] env[65503]: DEBUG oslo_concurrency.lockutils [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "8c274097-234a-44be-9159-c2fb0f1a8da1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.791914] env[65503]: DEBUG oslo_concurrency.lockutils [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "8c274097-234a-44be-9159-c2fb0f1a8da1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.791914] env[65503]: DEBUG oslo_concurrency.lockutils [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "8c274097-234a-44be-9159-c2fb0f1a8da1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.793496] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ef9ace1-717c-428a-b8bc-90801c554238 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.796580] env[65503]: INFO nova.compute.manager [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Terminating instance [ 711.798932] env[65503]: INFO nova.compute.manager [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] instance snapshotting [ 711.803357] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9cf708-6a07-415f-bba3-c0fce2af5a5e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.812442] 
env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 711.812442] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c9250e-3cec-d0ca-80bc-f6a2ff6c177c" [ 711.812442] env[65503]: _type = "Task" [ 711.812442] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.834805] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67df7e7-7010-495b-bc3a-dc2026316f73 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.842186] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c9250e-3cec-d0ca-80bc-f6a2ff6c177c, 'name': SearchDatastore_Task, 'duration_secs': 0.012969} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.843334] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b59c1a5-d308-4a7b-97a0-a9a5cab65266 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.854993] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 711.854993] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c934e5-3f80-b121-f0a0-06b0a3dc6c6e" [ 711.854993] env[65503]: _type = "Task" [ 711.854993] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.864643] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c934e5-3f80-b121-f0a0-06b0a3dc6c6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.872534] env[65503]: DEBUG nova.compute.manager [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 711.905611] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance ad85eef0-cef7-4900-b193-1737a6c2f17b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.905778] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.905896] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 1e92795e-cf30-4175-9e31-c29278f3e9e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.906016] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance db942a2d-671b-4036-a80b-d2375145cd29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.906133] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance eec6a484-ab00-402e-a369-c3009065c553 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.906267] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 5e2cf383-312b-404f-acff-2ecb75678600 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 711.906385] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance a1908e71-31f9-4308-b4d6-7908d3208c5a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 711.906490] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 2ab1cd4b-f2c0-4264-8463-8127a733a1c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.906594] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 39266117-e82e-48ae-932a-be04b1a7351a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.906696] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 1c598208-a4d0-46b8-9a9c-107353e957b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.906798] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 9297d849-a966-48da-ba6a-453c42b99e44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.906911] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 16d508f9-72f0-4853-92fb-c8c7a37b5668 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 711.907025] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 9dbaff4f-ab02-481b-b51f-b134021d277c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.907143] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance ab09cfe5-8257-462b-9ebf-87081d5793ac is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 711.907248] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 8c274097-234a-44be-9159-c2fb0f1a8da1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.907351] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance f7b81948-c480-47a4-9d0f-5c2c163bd7f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.907461] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance fd548bd7-b686-43ef-83a7-c40addf8ba75 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
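
The resource-tracker messages above come from a periodic reconciliation pass: allocations recorded in placement are compared against the instances this compute host actually tracks, expected allocations are logged at DEBUG, and allocations that reference the host without a locally managed instance are left untouched with a WARNING. The following is a minimal, self-contained sketch of that decision pattern only; the function name, arguments, and data shapes are illustrative assumptions and do not reproduce Nova's actual resource_tracker code.

    # Hypothetical sketch of the reconciliation pattern suggested by the log
    # messages above; names and structures are illustrative, not Nova's code.
    import logging

    LOG = logging.getLogger(__name__)

    def reconcile_allocations(tracked_instance_ids, placement_allocations):
        """Compare placement allocations against instances tracked on this host.

        tracked_instance_ids: set of instance UUIDs this host actively manages.
        placement_allocations: dict of instance UUID -> {'resources': {...}}.
        """
        for instance_id, alloc in placement_allocations.items():
            if instance_id in tracked_instance_ids:
                # Instance is actively managed here; its allocation is expected.
                LOG.debug("Instance %s actively managed on this compute host and "
                          "has allocations in placement: %s.", instance_id, alloc)
            else:
                # Allocation references this host but the instance is not managed
                # here (e.g. mid-migration); do nothing rather than guess.
                LOG.warning("Instance %s is not being actively managed by this "
                            "compute host but has allocations referencing this "
                            "compute host: %s. Skipping heal of allocation.",
                            instance_id, alloc)

For example, calling reconcile_allocations({"5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1"}, {"5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1": {"resources": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}}}) would produce only the DEBUG branch, mirroring the "actively managed" entries in the log.
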
[ 711.907574] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 3ac287b4-2538-472b-84ac-7fed3c2ffff3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 711.907680] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 80cf5690-8a18-471a-b02f-3b7b9e539c0d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 711.907782] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance a197b590-1f74-4241-9579-2f2d3bb89a1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.907881] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.907979] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 8f0bf665-b21b-42ed-816d-69dee2f40654 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.908541] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 38e9a714-87f8-422c-9cc5-09b6aec76198 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.908687] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 606b8e9f-67c0-4d5c-85ab-ca35f8b31977 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.908796] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 34008711-b51b-467b-b972-bfda1023d696 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.908901] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 0c0c6d3e-f4d2-458f-aa69-19f87a37f162 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 711.909040] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 45a4b511-aa6a-433d-b136-f53686db9575 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 712.031545] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449711, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.702807} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.031895] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 34008711-b51b-467b-b972-bfda1023d696/34008711-b51b-467b-b972-bfda1023d696.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 712.031995] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 712.032474] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-82be27df-c2d2-49d3-904c-4b222d116df2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.042977] env[65503]: DEBUG oslo_concurrency.lockutils [req-560934bb-272b-40a9-bfce-a3a69d2b940e req-73bddfa2-3749-46f3-aad2-54283a613761 service nova] Releasing lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.043691] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for the task: (returnval){ [ 712.043691] env[65503]: value = "task-4449714" [ 712.043691] env[65503]: _type = "Task" [ 712.043691] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.054806] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449714, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.134807] env[65503]: DEBUG oslo_concurrency.lockutils [None req-075f5df2-ffa0-46d7-bc38-bb4fde9e0b96 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Lock "38e9a714-87f8-422c-9cc5-09b6aec76198" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.925s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.141746] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 712.142345] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 712.245224] env[65503]: WARNING neutronclient.v2_0.client [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 712.245224] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 712.245224] env[65503]: WARNING openstack [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 712.287920] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449712, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.288158] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449713, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.303235] env[65503]: DEBUG nova.compute.manager [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 712.303532] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 712.304405] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f0bbd5-db7b-41c6-9161-d585c04b773d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.312823] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 712.313260] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22ff99e1-5684-45ec-b081-a552db434de6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.353195] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 712.353569] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-35b5649a-e0e3-4bdd-8f35-57b7d77f2905 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.375994] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c934e5-3f80-b121-f0a0-06b0a3dc6c6e, 'name': SearchDatastore_Task, 'duration_secs': 0.014977} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.376459] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 712.376459] env[65503]: value = "task-4449716" [ 712.376459] env[65503]: _type = "Task" [ 712.376459] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.380632] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.380908] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 0c0c6d3e-f4d2-458f-aa69-19f87a37f162/0c0c6d3e-f4d2-458f-aa69-19f87a37f162.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 712.381676] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3639a71e-016c-4a13-968b-8bd6d92edd79 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.396643] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449716, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.399430] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 712.399430] env[65503]: value = "task-4449717" [ 712.399430] env[65503]: _type = "Task" [ 712.399430] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.399879] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 712.400100] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 712.400343] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Deleting the datastore file [datastore1] 8c274097-234a-44be-9159-c2fb0f1a8da1 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 712.400786] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fb851bf-4393-448a-9709-751f31132706 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.415837] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 9acbc312-a3a2-4758-87cd-5576c4f1f8dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 712.418463] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449717, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.420972] env[65503]: DEBUG oslo_vmware.api [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 712.420972] env[65503]: value = "task-4449718" [ 712.420972] env[65503]: _type = "Task" [ 712.420972] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.435104] env[65503]: DEBUG oslo_vmware.api [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449718, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.554923] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449714, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073583} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.555231] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 712.556158] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d3a83b-54c1-4e29-aa5d-8a02e0921984 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.584941] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 34008711-b51b-467b-b972-bfda1023d696/34008711-b51b-467b-b972-bfda1023d696.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 712.589127] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e42641a6-5291-40b4-90b9-cf285d0e2184 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.619321] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for the task: (returnval){ [ 712.619321] env[65503]: value = "task-4449719" [ 712.619321] env[65503]: _type = "Task" [ 712.619321] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.633978] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449719, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.783977] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449713, 'name': CreateVM_Task, 'duration_secs': 0.967822} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.787886] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 712.788261] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449712, 'name': PowerOnVM_Task} progress is 87%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.789219] env[65503]: WARNING neutronclient.v2_0.client [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 712.789776] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.789991] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.790427] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 712.790879] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c11295b-0cbd-452a-91f3-a4d7680f00bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.798694] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 712.798694] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]522f9c6d-ef57-f449-2297-16b258eea97a" [ 712.798694] env[65503]: _type = "Task" [ 712.798694] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.810496] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522f9c6d-ef57-f449-2297-16b258eea97a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.883815] env[65503]: DEBUG nova.compute.manager [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 712.899063] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449716, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.913402] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449717, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.919520] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 712.926083] env[65503]: DEBUG nova.virt.hardware [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 712.926460] env[65503]: DEBUG nova.virt.hardware [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 712.926610] env[65503]: DEBUG nova.virt.hardware [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 712.926936] env[65503]: DEBUG nova.virt.hardware [None 
req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 712.927168] env[65503]: DEBUG nova.virt.hardware [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 712.927342] env[65503]: DEBUG nova.virt.hardware [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 712.927634] env[65503]: DEBUG nova.virt.hardware [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 712.927840] env[65503]: DEBUG nova.virt.hardware [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 712.928044] env[65503]: DEBUG nova.virt.hardware [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 712.928314] env[65503]: DEBUG nova.virt.hardware [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 712.928612] env[65503]: DEBUG nova.virt.hardware [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 712.929898] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd256da-aa59-45fd-9bf4-53a7a1da934a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.970341] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb4e9a7-c89c-43c5-8feb-4e0540e03069 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.970341] env[65503]: DEBUG oslo_vmware.api [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: 
{'id': task-4449718, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.387145} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.970341] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 712.970341] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 712.970341] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 712.970341] env[65503]: INFO nova.compute.manager [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Took 0.67 seconds to destroy the instance on the hypervisor. [ 712.970341] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 712.970341] env[65503]: DEBUG nova.compute.manager [-] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 712.970341] env[65503]: DEBUG nova.network.neutron [-] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 712.971404] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
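
The recurring "Invoking ... Task", "Waiting for the task: (returnval){ ... }", "progress is N%", and "completed successfully" sequences throughout this excerpt reflect a poll-until-terminal-state loop around vCenter task objects (wait_for_task/_poll_task in oslo_vmware.api). The sketch below illustrates that polling pattern in isolation; get_task_info is an assumed callable standing in for the real vSphere bindings, and the interface is a simplification, not the oslo.vmware API.

    # Hypothetical sketch of the poll-until-done pattern behind the
    # "Waiting for the task ... progress is N% ... completed successfully" log lines.
    # get_task_info is an assumed callable returning an object with
    # .state ('queued'/'running'/'success'/'error'), .progress, and .error.
    import time

    def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info  # task completed successfully
            if info.state == "error":
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            # Still queued or running: report progress and poll again.
            print(f"Task {task_ref} progress is {info.progress or 0}%")
            time.sleep(poll_interval)

In the log, each poll iteration corresponds to one "_poll_task ... progress is N%" entry, and the final "_poll_task ... completed successfully" entry corresponds to the success branch returning with the task's duration recorded.
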
[ 712.972016] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 712.972158] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 713.130922] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449719, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.282043] env[65503]: DEBUG oslo_vmware.api [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449712, 'name': PowerOnVM_Task, 'duration_secs': 1.742701} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.282043] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 713.282043] env[65503]: INFO nova.compute.manager [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Took 10.69 seconds to spawn the instance on the hypervisor. [ 713.282043] env[65503]: DEBUG nova.compute.manager [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 713.283214] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf13317-ec10-4efc-bd16-3a2de5bd992a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.317894] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522f9c6d-ef57-f449-2297-16b258eea97a, 'name': SearchDatastore_Task, 'duration_secs': 0.080638} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.317894] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.317894] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 713.318162] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.318314] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.318449] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 713.318728] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fb84501-ca89-45ad-bfa7-4fbd61ccd09a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.330231] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 713.330431] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 713.331314] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc6c3a1e-4402-4164-9cba-b5dd0d9a1c85 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.339100] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 713.339100] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52db6e66-3977-e253-53cf-96906bc414f9" [ 713.339100] env[65503]: _type = "Task" [ 713.339100] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.350172] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52db6e66-3977-e253-53cf-96906bc414f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.393855] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449716, 'name': CreateSnapshot_Task, 'duration_secs': 0.979495} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.394451] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 713.394952] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca20e1bf-0a20-4773-a813-047c541b23a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.417125] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449717, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.768184} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.417510] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 0c0c6d3e-f4d2-458f-aa69-19f87a37f162/0c0c6d3e-f4d2-458f-aa69-19f87a37f162.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 713.417643] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 713.417925] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3234145-7fb1-4f83-8c43-4fbe1b1a4a55 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.424085] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 75578ccd-2b34-4948-9afa-ac94e9fd8b4b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 713.427522] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 713.427522] env[65503]: value = "task-4449720" [ 713.427522] env[65503]: _type = "Task" [ 713.427522] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.439170] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449720, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.448186] env[65503]: DEBUG nova.network.neutron [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Successfully updated port: 0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 713.506740] env[65503]: DEBUG nova.network.neutron [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 34008711-b51b-467b-b972-bfda1023d696] Updated VIF entry in instance network info cache for port 639772e3-599d-4f7e-81ad-21f2c2f49bbe. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 713.507134] env[65503]: DEBUG nova.network.neutron [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] [instance: 34008711-b51b-467b-b972-bfda1023d696] Updating instance_info_cache with network_info: [{"id": "639772e3-599d-4f7e-81ad-21f2c2f49bbe", "address": "fa:16:3e:db:9d:60", "network": {"id": "8540897b-c867-4372-aa4a-150eda1ef1bb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-671031913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e2bb13f406914efb8b6ae36d052c8f21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap639772e3-59", "ovs_interfaceid": "639772e3-599d-4f7e-81ad-21f2c2f49bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 713.545063] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 713.632631] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449719, 'name': ReconfigVM_Task, 'duration_secs': 0.536524} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.633047] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 34008711-b51b-467b-b972-bfda1023d696/34008711-b51b-467b-b972-bfda1023d696.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 713.633827] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9b0c9275-bc26-45ed-aa5f-bc3db8729b2a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.641787] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for the task: (returnval){ [ 713.641787] env[65503]: value = "task-4449721" [ 713.641787] env[65503]: _type = "Task" [ 713.641787] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.652966] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449721, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.803195] env[65503]: INFO nova.compute.manager [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Took 40.96 seconds to build instance. [ 713.853721] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52db6e66-3977-e253-53cf-96906bc414f9, 'name': SearchDatastore_Task, 'duration_secs': 0.016711} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.855722] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbf49e12-9b2c-4edf-b1c6-2594579daa58 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.864578] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 713.864578] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5290407c-a82c-acc3-b849-ef9a893a59f8" [ 713.864578] env[65503]: _type = "Task" [ 713.864578] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.886023] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5290407c-a82c-acc3-b849-ef9a893a59f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.923439] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 713.924750] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-deae3d21-dcc2-4fe1-9a8a-7135cc8b998c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.930894] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance b6cda94b-2894-4cf0-8522-6593df9723bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 713.930894] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 713.930894] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=100GB used_disk=19GB total_vcpus=48 used_vcpus=20 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '20', 'num_vm_active': '14', 'num_task_None': '13', 'num_os_type_None': '20', 'num_proj_771d20568f55445088cc06737c184615': '1', 'io_workload': '6', 'num_proj_5de0ae091db74426975a523e945110fa': '4', 'num_proj_9c519075bc624e7b90915354752765da': '1', 'num_proj_72a3774600024de1b1347117fd020278': '1', 'num_proj_6797718310754721b81c8d30acfdacd5': '2', 'num_task_image_snapshot': '1', 'num_proj_7dd65b5b754e4028a7aeecd7daaa4557': '2', 'num_task_deleting': '1', 'num_proj_c024f22a228f4d2faa4b9316ca53a1ea': '1', 'num_vm_suspended': '1', 'num_proj_eecbcfbbb0904f57939c23ef03418bd5': '1', 'num_proj_be67f50c5bc447309d4c04f3f2805455': '1', 'num_task_rebuild_block_device_mapping': '1', 'num_proj_bf54348a3d0948cfa816cc3746e86806': '1', 'num_proj_a8d373b14fc34ee69c50f9f7ce58c888': '1', 'num_vm_building': '5', 'num_task_spawning': '4', 'num_proj_cb3b7254cf72404d805209ff11130a1e': '1', 'num_proj_ebf44cd2ee0e4906bcdc3d16dfe7c838': '1', 'num_proj_e2bb13f406914efb8b6ae36d052c8f21': '1', 'num_proj_5c2254fd86b74662975d3ad1fa4b0f74': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 713.946334] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449720, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.219068} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.948037] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 713.948808] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 713.948808] env[65503]: value = "task-4449722" [ 713.948808] env[65503]: _type = "Task" [ 713.948808] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.949676] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bef79e-33cb-4a9f-b361-85196b9bdb93 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.953980] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.956854] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquired lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.957099] env[65503]: DEBUG nova.network.neutron [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 713.972082] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449722, 'name': CloneVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.993268] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 0c0c6d3e-f4d2-458f-aa69-19f87a37f162/0c0c6d3e-f4d2-458f-aa69-19f87a37f162.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 713.997040] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d6b0b09-f9be-46ad-aa2f-f0fdc6c40931 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.014746] env[65503]: DEBUG oslo_concurrency.lockutils [req-6da0363f-8d9e-4f62-a648-5c9c9a8d2ed2 req-c0d874b1-1fd0-47d6-9d5e-734e423eca87 service nova] Releasing lock "refresh_cache-34008711-b51b-467b-b972-bfda1023d696" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.028538] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 714.028538] env[65503]: value = "task-4449723" [ 714.028538] env[65503]: _type = "Task" [ 714.028538] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.040777] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449723, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.160386] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449721, 'name': Rename_Task, 'duration_secs': 0.236479} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.160386] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 714.160386] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d3dccc4-fe01-4320-a300-a0ccf2d9dd31 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.169712] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for the task: (returnval){ [ 714.169712] env[65503]: value = "task-4449724" [ 714.169712] env[65503]: _type = "Task" [ 714.169712] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.180822] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449724, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.308210] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f82518bd-8b8e-4153-ade2-edde79ffebfd tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.499s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.386152] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5290407c-a82c-acc3-b849-ef9a893a59f8, 'name': SearchDatastore_Task, 'duration_secs': 0.048557} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.386592] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.386981] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1/5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 714.387437] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-487e7c64-eddb-429c-a021-7155c8a8220a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.402582] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 714.402582] env[65503]: value = "task-4449725" [ 714.402582] env[65503]: _type = "Task" [ 714.402582] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.408377] env[65503]: DEBUG nova.network.neutron [-] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 714.422498] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449725, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.467963] env[65503]: WARNING openstack [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 714.468616] env[65503]: WARNING openstack [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 714.491269] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449722, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.536139] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449723, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.612067] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068addff-60ee-4092-b6fa-3809099b5fdf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.621416] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b74b33a-5ba0-400d-9f03-d4d927a477e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.658514] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852cf030-7f73-4a19-ac65-240d2933b47e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.667882] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0453df6d-91f9-403a-9210-5c588e31498f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.687017] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.691915] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449724, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.835760] env[65503]: DEBUG nova.network.neutron [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 714.904534] env[65503]: DEBUG nova.compute.manager [req-54c0663b-c369-41f7-a5e3-99cc109647ec req-2a93b3e4-c3e8-47b3-a2bf-61e26cab8930 service nova] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Received event network-vif-deleted-3985c7bb-5579-4aeb-9dce-54e2716f7d60 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 714.904854] env[65503]: DEBUG nova.compute.manager [req-54c0663b-c369-41f7-a5e3-99cc109647ec req-2a93b3e4-c3e8-47b3-a2bf-61e26cab8930 service nova] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Received event network-vif-deleted-ea1023c2-b650-4073-a973-29291b753f53 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 714.920607] env[65503]: INFO nova.compute.manager [-] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Took 1.95 seconds to deallocate network for instance. [ 714.921545] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449725, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.969636] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449722, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.044017] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449723, 'name': ReconfigVM_Task, 'duration_secs': 0.676143} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.045076] env[65503]: WARNING openstack [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 715.045519] env[65503]: WARNING openstack [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 715.054236] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 0c0c6d3e-f4d2-458f-aa69-19f87a37f162/0c0c6d3e-f4d2-458f-aa69-19f87a37f162.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 715.056023] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6ad0198-ea79-4f23-8274-821ac8a6133b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.066026] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 715.066026] env[65503]: value = "task-4449726" [ 715.066026] env[65503]: _type = "Task" [ 715.066026] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.080443] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449726, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.186165] env[65503]: DEBUG oslo_vmware.api [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449724, 'name': PowerOnVM_Task, 'duration_secs': 0.888704} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.187136] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 715.187495] env[65503]: INFO nova.compute.manager [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Took 10.43 seconds to spawn the instance on the hypervisor. [ 715.187811] env[65503]: DEBUG nova.compute.manager [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 715.188813] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7990e4-b2c7-47d3-96c0-51001418b3d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.193364] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 715.358910] env[65503]: WARNING neutronclient.v2_0.client [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
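The entries above all follow the same wait_for_task / _poll_task cycle: a vCenter Task moref is returned ("Waiting for the task ... to complete"), then polled at intervals ("Task: {...} progress is N%") until it reports "completed successfully". A minimal sketch of that poll loop, written generically rather than as the oslo.vmware implementation; `get_task_info` is a hypothetical callable standing in for the PropertyCollector round-trip the library performs on each poll:

```python
# Illustrative sketch only (not oslo.vmware's code): the generic
# poll-until-done loop behind the repeated "progress is N%" log lines.
import time


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it reports success or error."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()  # hypothetical, e.g. {'state': 'running', 'progress': 78}
        state = info.get("state")
        if state == "success":
            return info                       # mirrors "... completed successfully."
        if state == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # mirrors "Task: {...} progress is N%."
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")
```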
[ 715.359843] env[65503]: WARNING openstack [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 715.360356] env[65503]: WARNING openstack [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 715.418209] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.732201} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.418505] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1/5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 715.418719] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 715.418988] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2b5493f-2559-4ffa-8d72-e1d12f6472f8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.427209] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 715.427209] env[65503]: value = "task-4449727" [ 715.427209] env[65503]: _type = "Task" [ 715.427209] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.433675] env[65503]: DEBUG oslo_concurrency.lockutils [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.448765] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449727, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.467821] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449722, 'name': CloneVM_Task} progress is 95%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.490317] env[65503]: DEBUG nova.network.neutron [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updating instance_info_cache with network_info: [{"id": "0edc90ad-4b80-4fad-8456-06f696d9756a", "address": "fa:16:3e:37:71:1c", "network": {"id": "9b0ccd37-b92a-4bb9-bd80-6ac034d3105c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2254fd86b74662975d3ad1fa4b0f74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0edc90ad-4b", "ovs_interfaceid": "0edc90ad-4b80-4fad-8456-06f696d9756a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 715.578553] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449726, 'name': Rename_Task, 'duration_secs': 0.199523} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.578900] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 715.579092] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ead79e4-f797-4d0d-b2a3-59c19a017ce6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.586251] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 715.586251] env[65503]: value = "task-4449728" [ 715.586251] env[65503]: _type = "Task" [ 715.586251] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.596031] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449728, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.706834] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 715.706834] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.871s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.706834] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.196s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.707847] env[65503]: INFO nova.compute.claims [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.731259] env[65503]: INFO nova.compute.manager [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Took 33.99 seconds to build instance. 
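Interleaved with the task polling, the log shows oslo.concurrency named locks: the lock() context manager produces the "Acquiring lock / Acquired lock / Releasing lock" lines around the image-cache and refresh_cache paths, while the synchronized decorator produces the "acquired ... waited N s" / "released ... held N s" timings on "compute_resources". A minimal sketch of both usages, assuming oslo.concurrency is installed; the lock names and function bodies here are placeholders, not Nova's actual code:

```python
# Sketch of the named-lock pattern visible in the log, using the real
# oslo.concurrency API but with placeholder lock targets and bodies.
from oslo_concurrency import lockutils


def refresh_image_cache(datastore, image_id):
    lock_name = f"[{datastore}] devstack-image-cache_base/{image_id}"
    # Context-manager form: lockutils logs the "Acquiring/Acquired/Releasing
    # lock" DEBUG lines seen above while this block runs.
    with lockutils.lock(lock_name):
        pass  # fetch or reuse the cached VMDK here


# Decorator form: the wrapper logs the "acquired ... waited N s" and
# "released ... held N s" timings, as with the "compute_resources" lock.
@lockutils.synchronized("compute_resources")
def update_usage():
    pass
```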
[ 715.944330] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154772} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.944636] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 715.945460] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a203d89-2e78-4541-9b0f-9e0e2b9975fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.971825] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1/5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 715.977831] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94d681b2-ce72-4a5a-9680-531889c988e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.992786] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Releasing lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.993203] env[65503]: DEBUG nova.compute.manager [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Instance network_info: |[{"id": "0edc90ad-4b80-4fad-8456-06f696d9756a", "address": "fa:16:3e:37:71:1c", "network": {"id": "9b0ccd37-b92a-4bb9-bd80-6ac034d3105c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2254fd86b74662975d3ad1fa4b0f74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0edc90ad-4b", "ovs_interfaceid": "0edc90ad-4b80-4fad-8456-06f696d9756a", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 715.994112] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:71:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7edb7c08-2fae-4df5-9ec6-5ccf06d7e337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0edc90ad-4b80-4fad-8456-06f696d9756a', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 716.001733] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Creating folder: Project (5c2254fd86b74662975d3ad1fa4b0f74). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 716.006123] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-794735c0-56e3-4601-8243-664b5bd5fcb7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.008384] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449722, 'name': CloneVM_Task, 'duration_secs': 1.632647} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.010321] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Created linked-clone VM from snapshot [ 716.010625] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 716.010625] env[65503]: value = "task-4449729" [ 716.010625] env[65503]: _type = "Task" [ 716.010625] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.012206] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c122c9b7-3902-4f89-9770-e034f91a1e58 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.026858] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Created folder: Project (5c2254fd86b74662975d3ad1fa4b0f74) in parent group-v870190. [ 716.027252] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Creating folder: Instances. 
Parent ref: group-v870301. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 716.036052] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a817d72-f6b9-49f1-8170-b4c086a9d97d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.037077] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449729, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.037389] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Uploading image b4d9903d-4ccc-461a-a20c-5aed03a50d5e {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 716.053139] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Created folder: Instances in parent group-v870301. [ 716.053220] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 716.053556] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 716.054046] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02e5e598-a759-44fe-90ac-7f3e20fccfea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.082453] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 716.082453] env[65503]: value = "vm-870300" [ 716.082453] env[65503]: _type = "VirtualMachine" [ 716.082453] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 716.082837] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-756f4132-280b-41b2-94b7-45d91ddf20e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.086205] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 716.086205] env[65503]: value = "task-4449732" [ 716.086205] env[65503]: _type = "Task" [ 716.086205] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.098280] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lease: (returnval){ [ 716.098280] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c428d0-9045-a3c1-9637-a6d0fad6977c" [ 716.098280] env[65503]: _type = "HttpNfcLease" [ 716.098280] env[65503]: } obtained for exporting VM: (result){ [ 716.098280] env[65503]: value = "vm-870300" [ 716.098280] env[65503]: _type = "VirtualMachine" [ 716.098280] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 716.098813] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the lease: (returnval){ [ 716.098813] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c428d0-9045-a3c1-9637-a6d0fad6977c" [ 716.098813] env[65503]: _type = "HttpNfcLease" [ 716.098813] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 716.115276] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449728, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.115602] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449732, 'name': CreateVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.120825] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 716.120825] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c428d0-9045-a3c1-9637-a6d0fad6977c" [ 716.120825] env[65503]: _type = "HttpNfcLease" [ 716.120825] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 716.121273] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 716.121273] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c428d0-9045-a3c1-9637-a6d0fad6977c" [ 716.121273] env[65503]: _type = "HttpNfcLease" [ 716.121273] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 716.122625] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf237f1-e7b3-4935-bf23-f214bf47b523 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.134101] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c2dfc1-022d-8f22-7f8b-43dcf5e1fca2/disk-0.vmdk from lease info. 
{{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 716.134780] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c2dfc1-022d-8f22-7f8b-43dcf5e1fca2/disk-0.vmdk for reading. {{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 716.233424] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62176f4f-79ba-46d3-91dc-2bd0fabea037 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Lock "34008711-b51b-467b-b972-bfda1023d696" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.509s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.252827] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f37e352f-ce78-4f64-a1ef-6c8a1ccc6a73 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.533818] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449729, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.611862] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449732, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.612376] env[65503]: DEBUG oslo_vmware.api [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449728, 'name': PowerOnVM_Task, 'duration_secs': 0.925668} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.612956] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 716.613443] env[65503]: INFO nova.compute.manager [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Took 8.73 seconds to spawn the instance on the hypervisor. 
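The entries above trace the stream-optimized image upload for instance 2ab1cd4b-f2c0-4264-8463-8127a733a1c5: Nova asks vCenter for an HttpNfcLease on the source VM, waits for the lease to become ready, reads the disk-0.vmdk URL out of the lease info, and opens that URL for reading. The sketch below shows only the final streaming read; it uses plain `requests` instead of oslo_vmware.rw_handles, and the URL, chunk size, and TLS handling are placeholders, not the driver's actual code.

```python
# Illustrative only: stream a VMDK exposed by an HttpNfcLease over HTTPS.
# The real path in the log goes through oslo_vmware.rw_handles; this stand-in
# uses plain `requests` with a placeholder URL and chunk size.
import requests

VMDK_URL = "https://esx.example.test/nfc/LEASE-ID/disk-0.vmdk"  # placeholder
CHUNK_SIZE = 64 * 1024  # 64 KiB reads; the real handle chooses its own size


def stream_vmdk(url, verify_tls=True):
    """Yield the exported disk in chunks so it can be fed to an image store."""
    with requests.get(url, stream=True, verify=verify_tls, timeout=60) as resp:
        resp.raise_for_status()
        for chunk in resp.iter_content(chunk_size=CHUNK_SIZE):
            if chunk:
                yield chunk

# Usage (against a real lease URL):
#   total = sum(len(c) for c in stream_vmdk(VMDK_URL))
```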
[ 716.614962] env[65503]: DEBUG nova.compute.manager [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 716.615203] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7f40ab-beaa-444c-94cf-c9f02236613b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.036534] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449729, 'name': ReconfigVM_Task, 'duration_secs': 0.540097} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.037236] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1/5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 717.038073] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f759f0b9-bdf7-4e64-b8fd-e7069c86b9ac {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.049488] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 717.049488] env[65503]: value = "task-4449734" [ 717.049488] env[65503]: _type = "Task" [ 717.049488] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.067602] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449734, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.103537] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449732, 'name': CreateVM_Task, 'duration_secs': 0.568897} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.103808] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 717.104697] env[65503]: WARNING neutronclient.v2_0.client [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
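Most of the repetition in this section is the same polling loop: oslo.vmware re-reads a task's info on an interval and emits the "progress is N%" lines until the task reports success (at which point the "completed successfully" line carries duration_secs) or error. A stripped-down version of that loop is below; `read_task_info` is a hypothetical stand-in for the PropertyCollector read the library performs.

```python
# Simplified sketch of the task-polling loop behind the "progress is N%" lines.
# `read_task_info` is a hypothetical accessor that must return an object with
# .state, .progress, and .error attributes shaped like the vSphere TaskInfo type.
import time


class TaskFailed(Exception):
    pass


def wait_for_task(read_task_info, poll_interval=0.5):
    while True:
        info = read_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise TaskFailed(getattr(info.error, "localizedMessage", "task error"))
        # "queued" or "running": report progress and try again, as in the log.
        print(f"progress is {info.progress or 0}%")
        time.sleep(poll_interval)
```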
[ 717.104838] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.104951] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.105484] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 717.105815] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bc95787-a675-4875-9edb-be744b29f47e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.112603] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 717.112603] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]522edb72-a55d-5ffb-0ab3-1fffd8b0c01e" [ 717.112603] env[65503]: _type = "Task" [ 717.112603] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.127924] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522edb72-a55d-5ffb-0ab3-1fffd8b0c01e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.145420] env[65503]: INFO nova.compute.manager [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Took 35.40 seconds to build instance. 
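The Acquiring/Acquired/Releasing lines around "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" come from oslo.concurrency's lockutils, which serializes access to the cached image across workers before the SearchDatastore_Task runs. A minimal usage sketch follows; the lock name is copied from the log and lock_path is a placeholder.

```python
# Minimal sketch of the lockutils pattern visible in the log.
from oslo_concurrency import lockutils

CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
              "d68ffece-ab91-4610-b535-fa1fb25ade93")


def with_image_cache_lock(work):
    # external=True adds a file-based lock so separate processes serialize as
    # well, which appears to be what the "Acquired external semaphore" lines
    # refer to; lock_path here is a placeholder directory.
    with lockutils.lock(CACHE_LOCK, external=True, lock_path="/tmp/nova-locks"):
        return work()
```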
[ 717.433159] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b9f79e-05f4-44f3-92c7-b636c62cfae8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.444581] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e936c1-66b8-4f4d-9f33-57c0074155cc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.480104] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ce640a-8db4-494c-88c7-b8d00883b946 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.490609] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc143f60-7bc3-4de7-8144-82689a5ed529 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.508015] env[65503]: DEBUG nova.compute.provider_tree [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.564301] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449734, 'name': Rename_Task, 'duration_secs': 0.207779} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.564566] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 717.564870] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8303266d-ce5b-4583-91af-74fabba81ba0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.574052] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 717.574052] env[65503]: value = "task-4449735" [ 717.574052] env[65503]: _type = "Task" [ 717.574052] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.585904] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] Acquiring lock "refresh_cache-606b8e9f-67c0-4d5c-85ab-ca35f8b31977" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.586335] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] Acquired lock "refresh_cache-606b8e9f-67c0-4d5c-85ab-ca35f8b31977" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.586749] env[65503]: DEBUG nova.network.neutron [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 717.598115] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449735, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.625687] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522edb72-a55d-5ffb-0ab3-1fffd8b0c01e, 'name': SearchDatastore_Task, 'duration_secs': 0.016968} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.626549] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.626549] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 717.627322] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.627594] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.627867] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 717.630635] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8895d3c7-1ad2-41bb-ada8-aed72be74ef2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.646079] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 717.646079] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 717.646079] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4152421-b659-4f35-89af-f21a51bf77af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.650574] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c05d458-b815-4ee3-bd6a-444fd98cabb7 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.929s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.658378] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 717.658378] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a94ad0-d01f-35bc-293f-0e495253e9e7" [ 717.658378] env[65503]: _type = "Task" [ 717.658378] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.672727] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a94ad0-d01f-35bc-293f-0e495253e9e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.766420] env[65503]: DEBUG nova.compute.manager [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Received event network-changed-1e1fc8a1-f7e8-49f4-b328-b7f029f59874 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 717.768801] env[65503]: DEBUG nova.compute.manager [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Refreshing instance network info cache due to event network-changed-1e1fc8a1-f7e8-49f4-b328-b7f029f59874. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 717.769183] env[65503]: DEBUG oslo_concurrency.lockutils [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] Acquiring lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.770071] env[65503]: DEBUG oslo_concurrency.lockutils [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] Acquired lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.770071] env[65503]: DEBUG nova.network.neutron [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Refreshing network info cache for port 1e1fc8a1-f7e8-49f4-b328-b7f029f59874 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 718.013500] env[65503]: DEBUG nova.scheduler.client.report [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 718.089130] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449735, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.096024] env[65503]: WARNING neutronclient.v2_0.client [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
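Between the lock lines and the SearchDatastore/MakeDirectory calls above sits the image-cache check: take the lock on the cached VMDK path, create devstack-image-cache_base if it is missing, and look for the cached disk before deciding whether to fetch it. A rough outline of that flow is below; `datastore` is a hypothetical helper object, not the real nova.virt.vmwareapi API.

```python
# Rough sketch of the cache-then-fetch flow traced above; `datastore` and its
# methods are hypothetical stand-ins for the driver's ds_util/vmops helpers.
from oslo_concurrency import lockutils


def ensure_cached_image(datastore, cache_dir, image_id):
    cached_vmdk = f"{cache_dir}/{image_id}/{image_id}.vmdk"
    with lockutils.lock(f"[datastore1] {cached_vmdk}"):
        if not datastore.exists(cache_dir):           # SearchDatastore_Task
            datastore.mkdir(cache_dir)                # FileManager.MakeDirectory
        if not datastore.exists(cached_vmdk):         # SearchDatastore_Task
            datastore.fetch_image(image_id, cached_vmdk)
    return cached_vmdk
```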
[ 718.096024] env[65503]: WARNING openstack [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 718.096024] env[65503]: WARNING openstack [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 718.172995] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a94ad0-d01f-35bc-293f-0e495253e9e7, 'name': SearchDatastore_Task, 'duration_secs': 0.014393} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.174344] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eb7a1d2-7b27-4d6f-8195-250aecdd4200 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.182750] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 718.182750] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521f10ac-0482-95d7-e64d-2b14365962b7" [ 718.182750] env[65503]: _type = "Task" [ 718.182750] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.197534] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521f10ac-0482-95d7-e64d-2b14365962b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.274410] env[65503]: WARNING neutronclient.v2_0.client [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
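The repeated "Disabling service 'block-storage' / 'key-manager'" warnings are openstacksdk hitting an oslo.config NoSuchOptError: it tries to read an adapter option (valid_interfaces) that was never registered in the [cinder] or [barbican] group of the config it was handed, the read raises, and the service is skipped for that client. The behaviour is easy to reproduce with oslo.config directly:

```python
# Reproduces the NoSuchOptError behind the "Disabling service ..." warnings:
# reading an option that was never registered in a group raises instead of
# returning a default.
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup("cinder"))
conf([], project="demo")  # parse an empty command line

try:
    conf.cinder.valid_interfaces  # never registered in [cinder]
except cfg.NoSuchOptError as exc:
    print(f"caught: {exc}")
```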
[ 718.275188] env[65503]: WARNING openstack [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 718.275628] env[65503]: WARNING openstack [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 718.311803] env[65503]: WARNING openstack [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 718.312201] env[65503]: WARNING openstack [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 718.392341] env[65503]: DEBUG nova.compute.manager [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Received event network-vif-plugged-0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 718.392341] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Acquiring lock "45a4b511-aa6a-433d-b136-f53686db9575-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.392341] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Lock "45a4b511-aa6a-433d-b136-f53686db9575-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.392341] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Lock "45a4b511-aa6a-433d-b136-f53686db9575-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.393333] env[65503]: DEBUG nova.compute.manager [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] No waiting events found 
dispatching network-vif-plugged-0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 718.393611] env[65503]: WARNING nova.compute.manager [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Received unexpected event network-vif-plugged-0edc90ad-4b80-4fad-8456-06f696d9756a for instance with vm_state building and task_state spawning. [ 718.394237] env[65503]: DEBUG nova.compute.manager [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Received event network-changed-0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 718.394446] env[65503]: DEBUG nova.compute.manager [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Refreshing instance network info cache due to event network-changed-0edc90ad-4b80-4fad-8456-06f696d9756a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 718.394698] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Acquiring lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.394934] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Acquired lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.395386] env[65503]: DEBUG nova.network.neutron [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Refreshing network info cache for port 0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 718.465729] env[65503]: WARNING neutronclient.v2_0.client [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
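The network-vif-plugged handling above shows the external-event path: the event arrives from Neutron, the per-instance "<uuid>-events" lock is taken, and because nothing registered a waiter for it while the instance is still building/spawning, Nova logs "No waiting events found" followed by the "Received unexpected event" warning. The toy registry below illustrates that waiter/dispatch shape; it is not Nova's InstanceEvents implementation.

```python
# Illustrative waiter registry for external instance events; only the shape of
# the behaviour logged above, not Nova's actual InstanceEvents class.
import threading


class EventWaiters:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def expect(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Matches the "No waiting events found" / "unexpected event" case.
            print(f"unexpected event {event_name} for {instance_uuid}")
            return False
        ev.set()
        return True
```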
[ 718.466487] env[65503]: WARNING openstack [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 718.466913] env[65503]: WARNING openstack [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 718.522360] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.817s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.522705] env[65503]: DEBUG nova.compute.manager [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 718.526637] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.600s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.527013] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.530095] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.224s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.530740] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.536114] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be 
tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.581s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.536114] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.536692] env[65503]: DEBUG oslo_concurrency.lockutils [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.984s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.536787] env[65503]: DEBUG oslo_concurrency.lockutils [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.539445] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.395s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.539445] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.540734] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.788s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.542349] env[65503]: INFO nova.compute.claims [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.585360] env[65503]: INFO nova.scheduler.client.report [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Deleted allocations for instance ab09cfe5-8257-462b-9ebf-87081d5793ac [ 718.587924] env[65503]: INFO nova.scheduler.client.report [None 
req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Deleted allocations for instance 80cf5690-8a18-471a-b02f-3b7b9e539c0d [ 718.605332] env[65503]: DEBUG oslo_vmware.api [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449735, 'name': PowerOnVM_Task, 'duration_secs': 0.729406} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.609478] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 718.609747] env[65503]: DEBUG nova.compute.manager [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 718.611089] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722373cc-3ff4-4525-9775-f11ea5caaf5f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.642425] env[65503]: INFO nova.scheduler.client.report [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Deleted allocations for instance a1908e71-31f9-4308-b4d6-7908d3208c5a [ 718.653298] env[65503]: INFO nova.scheduler.client.report [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Deleted allocations for instance fd548bd7-b686-43ef-83a7-c40addf8ba75 [ 718.665504] env[65503]: INFO nova.scheduler.client.report [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Deleted allocations for instance 3ac287b4-2538-472b-84ac-7fed3c2ffff3 [ 718.696325] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521f10ac-0482-95d7-e64d-2b14365962b7, 'name': SearchDatastore_Task, 'duration_secs': 0.013324} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.696500] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.696689] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 45a4b511-aa6a-433d-b136-f53686db9575/45a4b511-aa6a-433d-b136-f53686db9575.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 718.696965] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd38c7b6-13f1-493f-a7bd-d06c44f06618 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.707177] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 718.707177] env[65503]: value = "task-4449736" [ 718.707177] env[65503]: _type = "Task" [ 718.707177] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.721292] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449736, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.898631] env[65503]: WARNING neutronclient.v2_0.client [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
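Once the cache lock is released, the cached VMDK is copied into the instance directory with VirtualDiskManager.CopyVirtualDisk_Task and, a little later in the log, extended to the flavor's root size with ExtendVirtualDisk_Task (1048576 KB here). A hedged sketch of those two calls through an oslo_vmware session follows; the keyword argument names mirror the vSphere SDK signatures and should be treated as assumptions to verify against the SDK in use, not as the driver's exact code.

```python
# Hedged sketch of the copy-then-extend step the log traces. `session` is an
# oslo_vmware.api.VMwareAPISession; the keyword names (sourceName, destName,
# newCapacityKb, ...) follow the vSphere CopyVirtualDisk_Task /
# ExtendVirtualDisk_Task signatures and are assumptions to double-check.
def copy_and_extend(session, dc_ref, source_vmdk, dest_vmdk, new_size_kb):
    disk_mgr = session.vim.service_content.virtualDiskManager

    copy_task = session.invoke_api(
        session.vim, "CopyVirtualDisk_Task", disk_mgr,
        sourceName=source_vmdk, sourceDatacenter=dc_ref,
        destName=dest_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)          # the CopyVirtualDisk_Task wait above

    extend_task = session.invoke_api(
        session.vim, "ExtendVirtualDisk_Task", disk_mgr,
        name=dest_vmdk, datacenter=dc_ref, newCapacityKb=new_size_kb)
    session.wait_for_task(extend_task)        # the ExtendVirtualDisk_Task wait below
```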
[ 718.899367] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 718.899769] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 718.982903] env[65503]: WARNING openstack [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 718.983719] env[65503]: WARNING openstack [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 719.051140] env[65503]: DEBUG nova.compute.utils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 719.059024] env[65503]: DEBUG nova.compute.manager [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 719.059024] env[65503]: DEBUG nova.network.neutron [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 719.059024] env[65503]: WARNING neutronclient.v2_0.client [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 719.059024] env[65503]: WARNING neutronclient.v2_0.client [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
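The "Allocating IP information in the background" line, paired with the "Start building block device mappings" entry a little further down, shows the build overlapping work: the Neutron allocation is kicked off asynchronously and only joined once the network info is actually needed. A generic illustration of that overlap is below; it uses concurrent.futures rather than Nova's eventlet machinery, and both helpers are hypothetical.

```python
# Generic illustration of overlapping network allocation with the rest of the
# build, as the compute manager does above. Uses concurrent.futures instead of
# Nova's eventlet-based machinery; both callables are hypothetical.
from concurrent.futures import ThreadPoolExecutor


def build_instance(allocate_networks, build_block_devices):
    with ThreadPoolExecutor(max_workers=1) as pool:
        nw_future = pool.submit(allocate_networks)   # "in the background"
        bdms = build_block_devices()                 # proceeds immediately
        network_info = nw_future.result()            # join when actually needed
    return network_info, bdms
```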
[ 719.059024] env[65503]: WARNING openstack [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 719.059024] env[65503]: WARNING openstack [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 719.113586] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc68eb4a-0686-4787-8863-563167b3405c tempest-ServerDiagnosticsV248Test-634945769 tempest-ServerDiagnosticsV248Test-634945769-project-member] Lock "ab09cfe5-8257-462b-9ebf-87081d5793ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.047s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.118072] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2ad6c05-395e-4d07-8d29-ba51728aeee0 tempest-InstanceActionsNegativeTestJSON-1252111218 tempest-InstanceActionsNegativeTestJSON-1252111218-project-member] Lock "80cf5690-8a18-471a-b02f-3b7b9e539c0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.675s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.137571] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.150443] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2660295-b4cd-451f-9a52-0ba080305f10 tempest-ServersTestJSON-1310463075 tempest-ServersTestJSON-1310463075-project-member] Lock "a1908e71-31f9-4308-b4d6-7908d3208c5a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.781s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.162211] env[65503]: DEBUG oslo_concurrency.lockutils [None req-260228e9-3958-4506-a659-b4c69f288560 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "fd548bd7-b686-43ef-83a7-c40addf8ba75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.206s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.179887] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8dc43ff0-526f-48d7-bcc5-5ff2bf6946be tempest-ServerDiagnosticsTest-704656141 tempest-ServerDiagnosticsTest-704656141-project-member] Lock "3ac287b4-2538-472b-84ac-7fed3c2ffff3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: 
held 30.087s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.225076] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449736, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.332048] env[65503]: DEBUG nova.network.neutron [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Updating instance_info_cache with network_info: [{"id": "bd140c8d-63d1-4c8c-a14e-2f8ce80a648c", "address": "fa:16:3e:bc:d7:29", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd140c8d-63", "ovs_interfaceid": "bd140c8d-63d1-4c8c-a14e-2f8ce80a648c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 719.371419] env[65503]: DEBUG nova.policy [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55db708d2a9b47baa25cafed2be1ba91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '463e93d05e1e4b27a3dc866a5b1991d0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 719.471043] env[65503]: WARNING neutronclient.v2_0.client [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
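The network_info blob written into the instance cache above is a list of VIF dicts; each carries the port id, MAC address, bound network, subnet/IP details and MTU that later reconfigure steps read. A small helper that summarizes the commonly-used fields, fed with a trimmed copy of the entry from the log:

```python
# Pull the commonly-read fields out of a network_info entry like the one logged
# above (trimmed to the parts used here).
vif = {
    "id": "bd140c8d-63d1-4c8c-a14e-2f8ce80a648c",
    "address": "fa:16:3e:bc:d7:29",
    "network": {
        "label": "tempest-ServersAdminTestJSON-2081893056-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.8", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
    "type": "ovs",
    "devname": "tapbd140c8d-63",
}


def summarize_vif(vif):
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    return (f"port {vif['id'][:8]} mac {vif['address']} ips {ips} "
            f"mtu {vif['network']['meta']['mtu']} via {vif['devname']}")


print(summarize_vif(vif))
```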
[ 719.471043] env[65503]: WARNING openstack [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 719.472463] env[65503]: WARNING openstack [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 719.557080] env[65503]: DEBUG nova.compute.manager [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 719.598135] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 719.598135] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 719.725170] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449736, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595625} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.730114] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 45a4b511-aa6a-433d-b136-f53686db9575/45a4b511-aa6a-433d-b136-f53686db9575.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 719.730442] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 719.732384] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aef74406-bf9a-4fe5-910c-8aa8feb1a7d0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.741779] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 719.741779] env[65503]: value = "task-4449737" [ 719.741779] env[65503]: _type = "Task" [ 719.741779] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.757230] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449737, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.810127] env[65503]: DEBUG nova.network.neutron [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Successfully created port: 6410d13c-8f5f-4943-818b-69c48368b69e {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 719.835681] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] Releasing lock "refresh_cache-606b8e9f-67c0-4d5c-85ab-ca35f8b31977" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.836020] env[65503]: DEBUG nova.compute.manager [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Inject network info {{(pid=65503) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7777}} [ 719.836370] env[65503]: DEBUG nova.compute.manager [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] network_info to inject: |[{"id": "bd140c8d-63d1-4c8c-a14e-2f8ce80a648c", "address": "fa:16:3e:bc:d7:29", "network": {"id": "9955607b-0533-4b4e-b2bf-d934403738ea", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2081893056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5de0ae091db74426975a523e945110fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd140c8d-63", "ovs_interfaceid": "bd140c8d-63d1-4c8c-a14e-2f8ce80a648c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7778}} [ 719.841718] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Reconfiguring VM instance to set the machine id {{(pid=65503) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 719.845181] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39e0d700-c707-4ef8-b0a0-ab30a70e454b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.871439] env[65503]: DEBUG oslo_vmware.api [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 
tempest-ServersAdminTestJSON-424739919-project-admin] Waiting for the task: (returnval){ [ 719.871439] env[65503]: value = "task-4449738" [ 719.871439] env[65503]: _type = "Task" [ 719.871439] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.881957] env[65503]: DEBUG oslo_vmware.api [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] Task: {'id': task-4449738, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.113943] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94917397-a508-41e7-8dce-13e4148ff3ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.123389] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a654f2-dd8d-4afb-832d-c21d83d8c54a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.160998] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d766334-1b2d-4d68-b56d-ba7eda46b233 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.170597] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d39bcc-5ca0-4e50-b98a-eaae6a4c8c9c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.186825] env[65503]: DEBUG nova.compute.provider_tree [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.253481] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449737, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118955} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.253854] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 720.254807] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c02935-c69c-4b19-8fdd-d7a25f196f77 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.282860] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 45a4b511-aa6a-433d-b136-f53686db9575/45a4b511-aa6a-433d-b136-f53686db9575.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 720.283518] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39f05dbe-27f9-4345-9264-be3f335211e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.308995] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 720.308995] env[65503]: value = "task-4449739" [ 720.308995] env[65503]: _type = "Task" [ 720.308995] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.320829] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449739, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.383702] env[65503]: DEBUG oslo_vmware.api [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] Task: {'id': task-4449738, 'name': ReconfigVM_Task, 'duration_secs': 0.211785} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.384016] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60c77782-9191-49ea-9c56-8360d050ee1d tempest-ServersAdminTestJSON-424739919 tempest-ServersAdminTestJSON-424739919-project-admin] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Reconfigured VM instance to set the machine id {{(pid=65503) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 720.479438] env[65503]: DEBUG nova.network.neutron [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Updated VIF entry in instance network info cache for port 1e1fc8a1-f7e8-49f4-b328-b7f029f59874. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 720.479872] env[65503]: DEBUG nova.network.neutron [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Updating instance_info_cache with network_info: [{"id": "1e1fc8a1-f7e8-49f4-b328-b7f029f59874", "address": "fa:16:3e:cf:06:d8", "network": {"id": "234e0d20-0522-4720-b75f-e1246236d495", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1433648354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8d373b14fc34ee69c50f9f7ce58c888", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e1fc8a1-f7", "ovs_interfaceid": "1e1fc8a1-f7e8-49f4-b328-b7f029f59874", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 720.578371] env[65503]: DEBUG nova.compute.manager [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 720.607078] env[65503]: WARNING neutronclient.v2_0.client [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
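The instance_info_cache update above carries the full Neutron network_info for port 1e1fc8a1-f7e8-49f4-b328-b7f029f59874, including its fixed address 192.168.128.5 and the attached floating IP 10.180.180.183. As a minimal sketch of that data structure, the Python snippet below walks such a list and collects addresses per VIF; the summarize_vifs helper and the trimmed sample are illustrative only, not Nova code.

    # Illustration only: walk a Neutron-style network_info list like the one
    # logged above and collect fixed and floating IPs per VIF.
    network_info = [{
        "id": "1e1fc8a1-f7e8-49f4-b328-b7f029f59874",
        "address": "fa:16:3e:cf:06:d8",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.5",
                    "type": "fixed",
                    "floating_ips": [{"address": "10.180.180.183",
                                      "type": "floating"}],
                }],
            }],
        },
    }]

    def summarize_vifs(network_info):
        """Return {port_id: {"mac": ..., "fixed": [...], "floating": [...]}}."""
        summary = {}
        for vif in network_info:
            fixed, floating = [], []
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    fixed.append(ip["address"])
                    floating.extend(f["address"]
                                    for f in ip.get("floating_ips", []))
            summary[vif["id"]] = {"mac": vif["address"],
                                  "fixed": fixed,
                                  "floating": floating}
        return summary

    print(summarize_vifs(network_info))
    # {'1e1fc8a1-...': {'mac': 'fa:16:3e:cf:06:d8',
    #                   'fixed': ['192.168.128.5'],
    #                   'floating': ['10.180.180.183']}}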
[ 720.608214] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 720.608738] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 720.625545] env[65503]: DEBUG nova.virt.hardware [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 720.625934] env[65503]: DEBUG nova.virt.hardware [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 720.626127] env[65503]: DEBUG nova.virt.hardware [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 720.626313] env[65503]: DEBUG nova.virt.hardware [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 720.626454] env[65503]: DEBUG nova.virt.hardware [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 720.626596] env[65503]: DEBUG nova.virt.hardware [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 720.626809] env[65503]: DEBUG nova.virt.hardware [None 
req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 720.626950] env[65503]: DEBUG nova.virt.hardware [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 720.627127] env[65503]: DEBUG nova.virt.hardware [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 720.627280] env[65503]: DEBUG nova.virt.hardware [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 720.627443] env[65503]: DEBUG nova.virt.hardware [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 720.628826] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e554f4c-cc31-465d-aebb-fbb5788eeaed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.638339] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3ff9e1-752d-49db-acc2-0ed978668e50 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.690246] env[65503]: DEBUG nova.scheduler.client.report [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 720.820534] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449739, 'name': ReconfigVM_Task, 'duration_secs': 0.348462} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.820828] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 45a4b511-aa6a-433d-b136-f53686db9575/45a4b511-aa6a-433d-b136-f53686db9575.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 720.821715] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c233e29-60e6-4035-8f04-1bf95072b805 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.830545] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 720.830545] env[65503]: value = "task-4449740" [ 720.830545] env[65503]: _type = "Task" [ 720.830545] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.843253] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449740, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.983366] env[65503]: DEBUG oslo_concurrency.lockutils [req-9f8d1d9b-f120-4aaf-aaf9-3cd3ab2d22de req-2ea71e53-da4f-4d8e-aa50-78e835cea5fc service nova] Releasing lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.200653] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.201293] env[65503]: DEBUG nova.compute.manager [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 721.204220] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.268s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.204307] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.206582] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.666s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.208172] env[65503]: INFO nova.compute.claims [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 721.244379] env[65503]: INFO nova.scheduler.client.report [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Deleted allocations for instance 16d508f9-72f0-4853-92fb-c8c7a37b5668 [ 721.348148] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449740, 'name': Rename_Task, 'duration_secs': 0.159152} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.348148] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 721.348616] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb4c76a3-823e-48ce-aba7-9bde6b164aea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.358107] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 721.358107] env[65503]: value = "task-4449741" [ 721.358107] env[65503]: _type = "Task" [ 721.358107] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.370502] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449741, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.379310] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquiring lock "7ed036d1-8188-4aab-9d6d-8d7e46147812" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.379748] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Lock "7ed036d1-8188-4aab-9d6d-8d7e46147812" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.713597] env[65503]: DEBUG nova.compute.utils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 721.718694] env[65503]: DEBUG nova.compute.manager [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 721.720958] env[65503]: DEBUG nova.network.neutron [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 721.720958] env[65503]: WARNING neutronclient.v2_0.client [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 721.720958] env[65503]: WARNING neutronclient.v2_0.client [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
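The lock records just above ('Acquiring lock ... by ...', 'acquired ... :: waited', '"released" ... :: held') are emitted by the oslo.concurrency decorator wrapped around the per-instance build and resource-claim paths. A minimal sketch of that pattern follows, assuming only the lock name taken from the log; the empty function body is a placeholder.

    # Sketch of the oslo.concurrency pattern behind the lockutils DEBUG
    # records above: the synchronized decorator serializes callers on the
    # same lock name and logs acquire/release with wait and hold times.
    import logging

    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('7ed036d1-8188-4aab-9d6d-8d7e46147812')
    def _locked_do_build_and_run_instance():
        # Placeholder body; while this runs, other callers using the same
        # lock name block, which is where the logged 'waited' time comes from.
        pass

    _locked_do_build_and_run_instance()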
[ 721.720958] env[65503]: WARNING openstack [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 721.720958] env[65503]: WARNING openstack [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 721.755233] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c300cd47-cc96-4dad-9144-1463087eadb8 tempest-ServerMetadataNegativeTestJSON-170156206 tempest-ServerMetadataNegativeTestJSON-170156206-project-member] Lock "16d508f9-72f0-4853-92fb-c8c7a37b5668" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.416s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.871597] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449741, 'name': PowerOnVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.883311] env[65503]: DEBUG nova.compute.manager [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 721.895361] env[65503]: DEBUG nova.network.neutron [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Successfully updated port: 6410d13c-8f5f-4943-818b-69c48368b69e {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 722.219966] env[65503]: DEBUG nova.compute.manager [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 722.323892] env[65503]: DEBUG nova.network.neutron [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updated VIF entry in instance network info cache for port 0edc90ad-4b80-4fad-8456-06f696d9756a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 722.325255] env[65503]: DEBUG nova.network.neutron [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updating instance_info_cache with network_info: [{"id": "0edc90ad-4b80-4fad-8456-06f696d9756a", "address": "fa:16:3e:37:71:1c", "network": {"id": "9b0ccd37-b92a-4bb9-bd80-6ac034d3105c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2254fd86b74662975d3ad1fa4b0f74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0edc90ad-4b", "ovs_interfaceid": "0edc90ad-4b80-4fad-8456-06f696d9756a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 722.378317] env[65503]: DEBUG oslo_vmware.api [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449741, 'name': PowerOnVM_Task, 'duration_secs': 0.688502} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.378317] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 722.378317] env[65503]: INFO nova.compute.manager [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Took 9.49 seconds to spawn the instance on the hypervisor. 
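The PowerOnVM_Task sequence above ('Invoking VirtualMachine.PowerOnVM_Task', 'Waiting for the task', 'progress is N%', 'completed successfully') is the usual oslo.vmware call-and-poll cycle: invoke the vSphere task method, then poll the returned task until it finishes. A minimal sketch of that cycle follows; the vCenter host, credentials and managed object ID are placeholders, not values from this run.

    # Sketch of the oslo.vmware invoke/poll cycle seen in the task records
    # above. Host, credentials and the VM managed object ID are assumptions.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed object reference for an existing VM (placeholder ID).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api() issues the SOAP call ('Invoking ...' in the log) and
    # wait_for_task() polls it ('progress is N%') until completion.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)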
[ 722.378317] env[65503]: DEBUG nova.compute.manager [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 722.380816] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8055fc0d-8bcf-431c-96d1-6c1f6b7dc99b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.402451] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "refresh_cache-9acbc312-a3a2-4758-87cd-5576c4f1f8dc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.402747] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "refresh_cache-9acbc312-a3a2-4758-87cd-5576c4f1f8dc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.403092] env[65503]: DEBUG nova.network.neutron [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 722.412509] env[65503]: DEBUG nova.policy [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4708fca766f447daa757dbf855ff7d89', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '771d20568f55445088cc06737c184615', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 722.422488] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.784802] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba09d3d-bdcb-4491-b3c9-a8bd0ce99cf7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.794897] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325c25e3-76d1-4f40-b829-0a9eff5af7ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.836356] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3b75d1d-c73a-4b9e-affe-566c148f250c 
req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Releasing lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.837687] env[65503]: DEBUG nova.compute.manager [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Received event network-vif-deleted-f303ecf7-0607-45a0-bad8-c2eb7e30b62c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 722.837687] env[65503]: DEBUG nova.compute.manager [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Received event network-changed-26c792db-2c05-4dd4-8223-013b2d5d3f9f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 722.837687] env[65503]: DEBUG nova.compute.manager [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Refreshing instance network info cache due to event network-changed-26c792db-2c05-4dd4-8223-013b2d5d3f9f. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 722.837687] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Acquiring lock "refresh_cache-38e9a714-87f8-422c-9cc5-09b6aec76198" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.837687] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Acquired lock "refresh_cache-38e9a714-87f8-422c-9cc5-09b6aec76198" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.837687] env[65503]: DEBUG nova.network.neutron [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Refreshing network info cache for port 26c792db-2c05-4dd4-8223-013b2d5d3f9f {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 722.839985] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e29901-3c1e-4e64-a5b0-6dad211c037f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.849679] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed0b3ba-9732-40c0-84e5-ef9411b3f471 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.868317] env[65503]: DEBUG nova.compute.provider_tree [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.870774] env[65503]: DEBUG nova.network.neutron [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Successfully created port: 
633bd812-c51f-4ae0-bab2-ced08b56a04b {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 722.910058] env[65503]: WARNING openstack [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 722.910448] env[65503]: WARNING openstack [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 722.924812] env[65503]: INFO nova.compute.manager [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Took 36.15 seconds to build instance. [ 723.234381] env[65503]: DEBUG nova.compute.manager [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 723.284945] env[65503]: DEBUG nova.virt.hardware [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:49:18Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='e416d5a6-7c96-408f-8f4c-2aff52378276',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1101851334',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 723.285924] env[65503]: DEBUG nova.virt.hardware [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 723.286179] env[65503]: DEBUG nova.virt.hardware [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 723.286463] env[65503]: DEBUG nova.virt.hardware [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 
tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 723.286641] env[65503]: DEBUG nova.virt.hardware [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 723.286792] env[65503]: DEBUG nova.virt.hardware [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 723.287058] env[65503]: DEBUG nova.virt.hardware [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 723.287268] env[65503]: DEBUG nova.virt.hardware [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 723.287495] env[65503]: DEBUG nova.virt.hardware [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 723.287700] env[65503]: DEBUG nova.virt.hardware [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 723.287881] env[65503]: DEBUG nova.virt.hardware [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 723.288913] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd405c1c-cbab-44a1-8ca1-d06e2838246e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.300348] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d7565c-66bb-425c-8148-f635a22fdc10 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.340740] env[65503]: WARNING neutronclient.v2_0.client [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
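The nova.virt.hardware records above trace CPU-topology selection for a 1-vCPU flavor: with no flavor or image limits, the maxima default to 65536 sockets/cores/threads, and the only factorization of one vCPU is 1 socket x 1 core x 1 thread, hence 'Got 1 possible topologies'. The snippet below illustrates that enumeration; it is not Nova's implementation.

    # Illustration only (not nova.virt.hardware): enumerate the
    # (sockets, cores, threads) factorizations of a vCPU count under the
    # per-dimension maxima shown in the log.
    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
        topologies = []
        for sockets, cores in product(divisors, divisors):
            if vcpus % (sockets * cores):
                continue
            threads = vcpus // (sockets * cores)
            if (sockets <= max_sockets and cores <= max_cores
                    and threads <= max_threads):
                topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))  # [(1, 1, 1)]
    print(possible_topologies(4))  # includes (1, 1, 4), (1, 2, 2), (2, 2, 1), ...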
[ 723.341609] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 723.343152] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 723.376458] env[65503]: DEBUG nova.scheduler.client.report [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 723.429469] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf36e7a1-57f3-403a-ba79-3ee67ae964ef tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "45a4b511-aa6a-433d-b136-f53686db9575" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.591s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.884925] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.886470] env[65503]: DEBUG nova.compute.manager [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 723.890207] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.824s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.890293] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.893213] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.348s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.895674] env[65503]: INFO nova.compute.claims [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.902743] env[65503]: DEBUG nova.network.neutron [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 723.944100] env[65503]: INFO nova.scheduler.client.report [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Deleted allocations for instance 5e2cf383-312b-404f-acff-2ecb75678600 [ 723.995895] env[65503]: WARNING openstack [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 723.996667] env[65503]: WARNING openstack [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 724.408329] env[65503]: DEBUG nova.compute.utils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 724.413206] env[65503]: DEBUG nova.compute.manager [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 724.413491] env[65503]: DEBUG nova.network.neutron [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 724.413964] env[65503]: WARNING neutronclient.v2_0.client [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 724.414408] env[65503]: WARNING neutronclient.v2_0.client [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
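The recurring 'Disabling service ...: no such option valid_interfaces in group [cinder]' (and [barbican]) warnings throughout this run come from openstacksdk reading a config object on which the keystoneauth adapter options are not registered for those groups; the SDK then disables the service rather than failing. The sketch below reproduces the oslo.config error and shows an assumed registration path via keystoneauth1.loading; it is illustrative and not the fix applied in this deployment.

    # Minimal reproduction of the NoSuchOptError behind the warnings above:
    # the [cinder] group exists, but no 'valid_interfaces' option (a
    # keystoneauth adapter option) is registered in it.
    from keystoneauth1 import loading as ks_loading
    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf([], project='demo')
    conf.register_group(cfg.OptGroup('cinder'))

    try:
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        print(exc)  # no such option valid_interfaces in group [cinder]

    # Registering the standard keystoneauth adapter options (service-type,
    # valid-interfaces, region-name, ...) for [cinder] makes the lookup work.
    ks_loading.register_adapter_conf_options(conf, 'cinder')
    print(conf.cinder.valid_interfaces)  # None unless set in the config file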
[ 724.415108] env[65503]: WARNING openstack [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 724.415550] env[65503]: WARNING openstack [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 724.460935] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c877283-daab-4002-80a8-64c376fc62d3 tempest-ImagesOneServerTestJSON-1248912397 tempest-ImagesOneServerTestJSON-1248912397-project-member] Lock "5e2cf383-312b-404f-acff-2ecb75678600" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.008s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.568981] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c2dfc1-022d-8f22-7f8b-43dcf5e1fca2/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 724.570223] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989bceae-18ab-4c2e-a5e2-fc5e630272bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.579352] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c2dfc1-022d-8f22-7f8b-43dcf5e1fca2/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 724.579570] env[65503]: ERROR oslo_vmware.rw_handles [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c2dfc1-022d-8f22-7f8b-43dcf5e1fca2/disk-0.vmdk due to incomplete transfer. [ 724.579829] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-dd7b525a-382c-4557-ba3f-ce76ec7b26ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.590162] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c2dfc1-022d-8f22-7f8b-43dcf5e1fca2/disk-0.vmdk. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 724.590413] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Uploaded image b4d9903d-4ccc-461a-a20c-5aed03a50d5e to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 724.592742] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 724.596836] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-276e9fdb-65cf-4774-9233-6355c7e01502 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.604471] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 724.604471] env[65503]: value = "task-4449742" [ 724.604471] env[65503]: _type = "Task" [ 724.604471] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.614937] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449742, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.874230] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7bd2961-0e39-4347-b732-c1f63f5cedb3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.888387] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1651aaa-d928-4c17-943e-ac97451c148c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.928068] env[65503]: DEBUG nova.compute.manager [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 724.933612] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98817d7-0237-40ba-9599-aa50bc701951 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.948052] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719af489-fb16-49c1-9723-8f77ee6a8d79 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.966854] env[65503]: DEBUG nova.compute.provider_tree [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.023406] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 725.023406] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 725.036048] env[65503]: DEBUG nova.network.neutron [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Successfully updated port: 633bd812-c51f-4ae0-bab2-ced08b56a04b {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 725.096312] env[65503]: DEBUG nova.policy [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f355cff34444fa5ae54277301d9fefd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd7b84f071d5d49168314c6baf24748a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 725.121283] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449742, 'name': Destroy_Task, 'duration_secs': 0.377603} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.122525] env[65503]: WARNING neutronclient.v2_0.client [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 725.123986] env[65503]: WARNING openstack [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 725.123986] env[65503]: WARNING openstack [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 725.138020] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Destroyed the VM [ 725.138020] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 725.138020] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c0e0f2b9-9b9a-4554-bd36-57ac33f97438 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.148543] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 725.148543] env[65503]: value = "task-4449743" [ 725.148543] env[65503]: _type = "Task" [ 725.148543] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.163412] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449743, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.471704] env[65503]: DEBUG nova.scheduler.client.report [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 725.542437] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.542660] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.542842] env[65503]: DEBUG nova.network.neutron [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 725.600029] env[65503]: DEBUG nova.network.neutron [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Successfully created port: 7dd31600-1222-4005-ad0f-74f4f93fcd4b {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 725.620985] env[65503]: WARNING neutronclient.v2_0.client [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
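The Destroy_Task and RemoveSnapshot_Task records above follow oslo.vmware's invoke-then-poll pattern, where wait_for_task drives the repeated "_poll_task ... progress is N%" lines. A minimal sketch of that pattern; the vCenter host, credentials and the 'vm-123' managed-object ID are placeholders, not values taken from this log:

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed-object reference for the VM to destroy (placeholder ID).
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# invoke_api() issues the SOAP call and returns a Task reference;
# wait_for_task() polls it until it reaches 'success' or raises on error.
task = session.invoke_api(session.vim, 'Destroy_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)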
[ 725.621728] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 725.622142] env[65503]: WARNING openstack [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 725.662468] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449743, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.945294] env[65503]: DEBUG nova.compute.manager [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 725.979486] env[65503]: DEBUG nova.virt.hardware [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 725.979757] env[65503]: DEBUG nova.virt.hardware [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 725.979899] env[65503]: DEBUG nova.virt.hardware [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 725.980203] env[65503]: DEBUG nova.virt.hardware [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 725.980402] env[65503]: DEBUG nova.virt.hardware [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 725.980585] env[65503]: DEBUG nova.virt.hardware [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 725.981067] env[65503]: DEBUG nova.virt.hardware [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 725.981067] env[65503]: DEBUG nova.virt.hardware [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 725.981215] env[65503]: DEBUG nova.virt.hardware [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 725.981476] env[65503]: DEBUG nova.virt.hardware [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 725.981691] env[65503]: DEBUG nova.virt.hardware [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 725.982526] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.090s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.983254] env[65503]: DEBUG nova.compute.manager [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 725.986943] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81eaf142-cc48-4ae0-b260-aae8ff05974e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.991132] env[65503]: DEBUG oslo_concurrency.lockutils [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.233s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.991564] env[65503]: DEBUG nova.objects.instance [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lazy-loading 'resources' on Instance uuid 9297d849-a966-48da-ba6a-453c42b99e44 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 725.999335] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e852826b-1a28-4491-9f1d-ad23bb137e96 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.046192] env[65503]: WARNING openstack [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 726.046723] env[65503]: WARNING openstack [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 726.164917] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449743, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.227120] env[65503]: DEBUG nova.network.neutron [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Updating instance_info_cache with network_info: [{"id": "6410d13c-8f5f-4943-818b-69c48368b69e", "address": "fa:16:3e:3e:09:5c", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6410d13c-8f", "ovs_interfaceid": "6410d13c-8f5f-4943-818b-69c48368b69e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 726.497020] env[65503]: DEBUG nova.compute.utils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 726.501225] env[65503]: DEBUG nova.compute.manager [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 726.503093] env[65503]: DEBUG nova.network.neutron [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 726.504818] env[65503]: WARNING neutronclient.v2_0.client [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 726.505474] env[65503]: WARNING neutronclient.v2_0.client [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 726.506682] env[65503]: WARNING openstack [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 726.507073] env[65503]: WARNING openstack [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 726.518536] env[65503]: DEBUG nova.compute.utils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 726.665767] env[65503]: DEBUG oslo_vmware.api [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449743, 'name': RemoveSnapshot_Task, 'duration_secs': 1.313674} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.666052] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 726.666472] env[65503]: INFO nova.compute.manager [None req-e3ac2732-f74d-4d69-b6ff-6f675ee76999 tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Took 14.86 seconds to snapshot the instance on the hypervisor. 
[ 726.729798] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "refresh_cache-9acbc312-a3a2-4758-87cd-5576c4f1f8dc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.730470] env[65503]: DEBUG nova.compute.manager [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Instance network_info: |[{"id": "6410d13c-8f5f-4943-818b-69c48368b69e", "address": "fa:16:3e:3e:09:5c", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6410d13c-8f", "ovs_interfaceid": "6410d13c-8f5f-4943-818b-69c48368b69e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 726.734082] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:09:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6410d13c-8f5f-4943-818b-69c48368b69e', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 726.742215] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 726.743133] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 726.743405] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-700a3ce7-8283-4069-ad3b-5214a54fe677 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.769013] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 726.769013] env[65503]: value = "task-4449744" [ 726.769013] env[65503]: _type = "Task" [ 726.769013] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.786452] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449744, 'name': CreateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.934967] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72861fb-65f7-4af9-9468-2f25bbc4fec0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.943966] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a40f377-1099-44b7-81ec-1975e5d746ee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.982160] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef74b37-5bc1-4ae6-b465-5e14e5848b8a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.994018] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39e67bb-71fe-4f78-954e-ba89bd4d89aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.007926] env[65503]: DEBUG nova.compute.provider_tree [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.022746] env[65503]: DEBUG nova.compute.manager [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 727.029805] env[65503]: DEBUG nova.network.neutron [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 727.283908] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449744, 'name': CreateVM_Task, 'duration_secs': 0.393355} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.284161] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 727.284793] env[65503]: WARNING neutronclient.v2_0.client [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 727.285238] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.285461] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.285857] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 727.286436] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98cae456-fd9b-4842-ac85-ba444e1ba8a6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.301634] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 727.301634] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5261fc4f-23cd-9ef7-6f56-e3e4901f2377" [ 727.301634] env[65503]: _type = "Task" [ 727.301634] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.312552] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5261fc4f-23cd-9ef7-6f56-e3e4901f2377, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.512585] env[65503]: DEBUG nova.scheduler.client.report [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 727.727130] env[65503]: DEBUG nova.network.neutron [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Updated VIF entry in instance network info cache for port 26c792db-2c05-4dd4-8223-013b2d5d3f9f. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 727.729522] env[65503]: DEBUG nova.network.neutron [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Updating instance_info_cache with network_info: [{"id": "26c792db-2c05-4dd4-8223-013b2d5d3f9f", "address": "fa:16:3e:74:d8:98", "network": {"id": "5551750c-9fbd-4b2e-a507-602c3bec1c4e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1477558748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb3b7254cf72404d805209ff11130a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26c792db-2c", "ovs_interfaceid": "26c792db-2c05-4dd4-8223-013b2d5d3f9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 727.767732] env[65503]: DEBUG nova.policy [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2d5194dd2bd49cda00feb316dd08ae4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c024f22a228f4d2faa4b9316ca53a1ea', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 
'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 727.815775] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5261fc4f-23cd-9ef7-6f56-e3e4901f2377, 'name': SearchDatastore_Task, 'duration_secs': 0.017461} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.816448] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.816448] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 727.816611] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.817138] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.817138] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 727.817138] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-774fc19a-0cf1-48f0-94c6-f2905feb8a0a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.833454] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 727.834244] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 727.834804] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-607fac19-5b68-4c1d-8be7-613c6d2a0882 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.841743] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 727.841743] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520ae5a0-b07b-e901-518e-cc1f8c587e6b" [ 727.841743] env[65503]: _type = "Task" [ 727.841743] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.852649] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520ae5a0-b07b-e901-518e-cc1f8c587e6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.865366] env[65503]: DEBUG nova.network.neutron [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Successfully updated port: 7dd31600-1222-4005-ad0f-74f4f93fcd4b {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 728.019258] env[65503]: DEBUG oslo_concurrency.lockutils [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.028s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.022163] env[65503]: DEBUG oslo_concurrency.lockutils [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.588s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.022459] env[65503]: DEBUG nova.objects.instance [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lazy-loading 'resources' on Instance uuid 8c274097-234a-44be-9159-c2fb0f1a8da1 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 728.040607] env[65503]: WARNING openstack [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 728.041011] env[65503]: WARNING openstack [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 728.051175] env[65503]: DEBUG nova.compute.manager [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 728.070294] env[65503]: INFO nova.scheduler.client.report [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Deleted allocations for instance 9297d849-a966-48da-ba6a-453c42b99e44 [ 728.110367] env[65503]: DEBUG nova.virt.hardware [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:47:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='31491439',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-302560939',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 728.110653] env[65503]: DEBUG nova.virt.hardware [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 728.110816] env[65503]: DEBUG nova.virt.hardware [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 728.111742] env[65503]: DEBUG nova.virt.hardware [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 728.111742] env[65503]: DEBUG nova.virt.hardware [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 728.111742] env[65503]: DEBUG nova.virt.hardware [None 
req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 728.111742] env[65503]: DEBUG nova.virt.hardware [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 728.111742] env[65503]: DEBUG nova.virt.hardware [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 728.112350] env[65503]: DEBUG nova.virt.hardware [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 728.112350] env[65503]: DEBUG nova.virt.hardware [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 728.112350] env[65503]: DEBUG nova.virt.hardware [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 728.114972] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e29c9e9-16e9-46df-aa1b-1eb864e8d35a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.125501] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47256161-188b-4c59-87b1-354d22ddff9c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.235178] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3b75d1d-c73a-4b9e-affe-566c148f250c req-69439417-8dc7-45a6-8c0b-04cd9b15b056 service nova] Releasing lock "refresh_cache-38e9a714-87f8-422c-9cc5-09b6aec76198" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.354255] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520ae5a0-b07b-e901-518e-cc1f8c587e6b, 'name': SearchDatastore_Task, 'duration_secs': 0.037275} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.355371] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9ad1506-6bd3-461d-a214-abcbdeb3537c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.363040] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 728.363040] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214728f-57b5-23d6-1c2e-eb40a1e0eca3" [ 728.363040] env[65503]: _type = "Task" [ 728.363040] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.368362] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquiring lock "refresh_cache-75578ccd-2b34-4948-9afa-ac94e9fd8b4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.369790] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquired lock "refresh_cache-75578ccd-2b34-4948-9afa-ac94e9fd8b4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.369790] env[65503]: DEBUG nova.network.neutron [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 728.377045] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214728f-57b5-23d6-1c2e-eb40a1e0eca3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.584205] env[65503]: DEBUG oslo_concurrency.lockutils [None req-eafd55f1-2bc4-4eaf-90ca-e10d725b87ec tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "9297d849-a966-48da-ba6a-453c42b99e44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.758s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.875735] env[65503]: WARNING openstack [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 728.875735] env[65503]: WARNING openstack [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 728.892570] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214728f-57b5-23d6-1c2e-eb40a1e0eca3, 'name': SearchDatastore_Task, 'duration_secs': 0.033608} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.892831] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.893097] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 9acbc312-a3a2-4758-87cd-5576c4f1f8dc/9acbc312-a3a2-4758-87cd-5576c4f1f8dc.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 728.893577] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1eba7b3-7435-42fd-9284-bf703d2c68f7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.903338] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 728.903338] env[65503]: value = "task-4449745" [ 728.903338] env[65503]: _type = "Task" [ 728.903338] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.911877] env[65503]: DEBUG nova.network.neutron [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Successfully created port: f5e5c61e-0df6-47d9-ab15-21c80b68c833 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 728.917633] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449745, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.998031] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6f4ee2-8de3-42e7-af40-ced17e5f08a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.006226] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a461efe2-aece-483b-a4dd-781c4f14fef1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.045805] env[65503]: WARNING neutronclient.v2_0.client [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
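Editor's note: the SearchDatastore_Task and CopyVirtualDisk_Task entries above show the oslo.vmware polling pattern: the driver submits a vCenter task, then wait_for_task repeatedly reads the task state (the "progress is N%" lines) until it reaches success or error. A minimal standalone sketch of that polling loop, using a fake task object rather than the real oslo_vmware.api session, might look roughly like this:

```python
import time


class FakeTask:
    """Stand-in for a vCenter task handle; not the real oslo.vmware API."""

    def __init__(self, steps):
        self._steps = iter(steps)  # e.g. [0, 51, 100, "success"]

    def poll(self):
        return next(self._steps)


def wait_for_task(task, interval=0.5):
    """Poll a task until it finishes, mimicking the 'progress is N%' log lines."""
    while True:
        state = task.poll()
        if state == "success":
            print("Task completed successfully")
            return
        if state == "error":
            raise RuntimeError("Task failed")
        print(f"Task progress is {state}%")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask([0, 51, 100, "success"]), interval=0.01)
```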
[ 729.046542] env[65503]: WARNING openstack [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 729.046907] env[65503]: WARNING openstack [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 729.056906] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79953f45-1482-4fdd-b5da-81d3d251594b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.065816] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e15e77-d02f-4d2c-9228-8b52b93b31e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.082034] env[65503]: DEBUG nova.compute.provider_tree [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.377930] env[65503]: DEBUG nova.compute.manager [req-a06030d9-98c0-4933-83a8-d4a902b81a69 req-26307933-0b6f-4280-962e-2ef2b42a20b9 service nova] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Received event network-vif-plugged-6410d13c-8f5f-4943-818b-69c48368b69e {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 729.378302] env[65503]: DEBUG oslo_concurrency.lockutils [req-a06030d9-98c0-4933-83a8-d4a902b81a69 req-26307933-0b6f-4280-962e-2ef2b42a20b9 service nova] Acquiring lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.378549] env[65503]: DEBUG oslo_concurrency.lockutils [req-a06030d9-98c0-4933-83a8-d4a902b81a69 req-26307933-0b6f-4280-962e-2ef2b42a20b9 service nova] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.378720] env[65503]: DEBUG oslo_concurrency.lockutils [req-a06030d9-98c0-4933-83a8-d4a902b81a69 req-26307933-0b6f-4280-962e-2ef2b42a20b9 service nova] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.378956] env[65503]: DEBUG nova.compute.manager [req-a06030d9-98c0-4933-83a8-d4a902b81a69 
req-26307933-0b6f-4280-962e-2ef2b42a20b9 service nova] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] No waiting events found dispatching network-vif-plugged-6410d13c-8f5f-4943-818b-69c48368b69e {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 729.379148] env[65503]: WARNING nova.compute.manager [req-a06030d9-98c0-4933-83a8-d4a902b81a69 req-26307933-0b6f-4280-962e-2ef2b42a20b9 service nova] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Received unexpected event network-vif-plugged-6410d13c-8f5f-4943-818b-69c48368b69e for instance with vm_state building and task_state spawning. [ 729.399656] env[65503]: DEBUG nova.network.neutron [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 729.421863] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449745, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.585811] env[65503]: DEBUG nova.scheduler.client.report [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 729.915340] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449745, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.092632] env[65503]: DEBUG oslo_concurrency.lockutils [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.070s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.095145] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 10.958s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.095469] env[65503]: DEBUG nova.objects.instance [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 730.118784] env[65503]: INFO nova.scheduler.client.report [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Deleted allocations for instance 8c274097-234a-44be-9159-c2fb0f1a8da1 [ 730.260875] env[65503]: WARNING openstack [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 730.260875] env[65503]: WARNING openstack [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 730.418991] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449745, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.628424] env[65503]: DEBUG oslo_concurrency.lockutils [None req-386a3570-04f3-4959-8833-bedf23d8fe38 tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "8c274097-234a-44be-9159-c2fb0f1a8da1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.837s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.643494] env[65503]: DEBUG nova.network.neutron [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance_info_cache with network_info: [{"id": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "address": "fa:16:3e:b6:69:8c", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633bd812-c5", "ovs_interfaceid": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 730.918209] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449745, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.613471} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.918583] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 9acbc312-a3a2-4758-87cd-5576c4f1f8dc/9acbc312-a3a2-4758-87cd-5576c4f1f8dc.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 730.918702] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 730.918938] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7b68174-5fee-496d-af0c-cedfc48b84da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.927408] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 730.927408] env[65503]: value = "task-4449746" [ 730.927408] env[65503]: _type = "Task" [ 730.927408] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.942408] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449746, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.945188] env[65503]: DEBUG nova.network.neutron [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Successfully updated port: f5e5c61e-0df6-47d9-ab15-21c80b68c833 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 731.106212] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ded877af-7b52-4629-98d4-8e550b0761da tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.107455] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.685s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.110847] env[65503]: INFO nova.compute.claims [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 731.146774] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.148292] env[65503]: DEBUG nova.compute.manager [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Instance network_info: |[{"id": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "address": "fa:16:3e:b6:69:8c", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633bd812-c5", "ovs_interfaceid": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2035}} [ 731.148292] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:69:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '633bd812-c51f-4ae0-bab2-ced08b56a04b', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 731.156065] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 731.156911] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 731.157184] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79e0d660-d8ac-4e8c-9f3a-30ba12e4a850 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.186258] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 731.186258] env[65503]: value = "task-4449747" [ 731.186258] env[65503]: _type = "Task" [ 731.186258] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.195790] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449747, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.439973] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449746, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077459} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.439973] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 731.441618] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34220f3b-ba27-46f6-a873-a6b6a55b4bf8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.460184] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "refresh_cache-b6cda94b-2894-4cf0-8522-6593df9723bd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.460495] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquired lock "refresh_cache-b6cda94b-2894-4cf0-8522-6593df9723bd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.460702] env[65503]: DEBUG nova.network.neutron [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 731.471904] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 9acbc312-a3a2-4758-87cd-5576c4f1f8dc/9acbc312-a3a2-4758-87cd-5576c4f1f8dc.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 731.473627] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-658c9747-8c92-4a74-9182-d4fc7988660d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.500373] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 731.500373] env[65503]: value = "task-4449748" [ 731.500373] env[65503]: _type = "Task" [ 731.500373] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.511983] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449748, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.696418] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449747, 'name': CreateVM_Task, 'duration_secs': 0.432371} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.696599] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 731.697112] env[65503]: WARNING neutronclient.v2_0.client [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 731.697478] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.697630] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.697958] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 731.698573] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78eb4529-b6f5-45b1-b5cb-0d5cbb05c14b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.704897] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 731.704897] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a8b0a2-02df-88ff-9b6d-278e4663d14c" [ 731.704897] env[65503]: _type = "Task" [ 731.704897] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.714705] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a8b0a2-02df-88ff-9b6d-278e4663d14c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.975194] env[65503]: WARNING openstack [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 731.975194] env[65503]: WARNING openstack [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 732.016388] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449748, 'name': ReconfigVM_Task, 'duration_secs': 0.329758} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.016704] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 9acbc312-a3a2-4758-87cd-5576c4f1f8dc/9acbc312-a3a2-4758-87cd-5576c4f1f8dc.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 732.017501] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5eb84fa6-e208-4ab3-b996-8f88138d5084 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.028753] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 732.028753] env[65503]: value = "task-4449749" [ 732.028753] env[65503]: _type = "Task" [ 732.028753] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.043317] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449749, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.220438] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a8b0a2-02df-88ff-9b6d-278e4663d14c, 'name': SearchDatastore_Task, 'duration_secs': 0.015347} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.220940] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.222044] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 732.222505] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.222620] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.223948] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.224157] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ca5b81a-04b3-42e4-80d7-b178acffc0f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.240161] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.240161] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 732.240161] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18c36cb4-b292-4a04-800f-d652d8a762da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.250100] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 732.250100] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528e1f1c-3224-7d1a-7a2e-03e186a23480" [ 732.250100] env[65503]: _type = "Task" [ 732.250100] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.262162] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528e1f1c-3224-7d1a-7a2e-03e186a23480, 'name': SearchDatastore_Task, 'duration_secs': 0.011689} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.266421] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3d6b01b-d4b5-4607-902b-b82813864ff6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.277026] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 732.277026] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f68237-8e2b-4953-538b-7bab0324bdd4" [ 732.277026] env[65503]: _type = "Task" [ 732.277026] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.284940] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f68237-8e2b-4953-538b-7bab0324bdd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.384191] env[65503]: WARNING neutronclient.v2_0.client [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
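Editor's note: the devstack-image-cache_base sequence above (acquire a per-image lock, search the datastore for the cached VMDK, create the cache folder if missing, then copy the disk into the instance directory) is a check-then-populate cache guarded by a lock. A simplified sketch of that pattern using only the standard library; the paths and helper names are illustrative assumptions, not Nova's actual code:

```python
import shutil
import threading
from collections import defaultdict
from pathlib import Path

# One lock per cached image id so concurrent spawns of the same image serialize,
# roughly like the "[datastore1] devstack-image-cache_base/<id>" locks in the log.
_image_locks = defaultdict(threading.Lock)


def ensure_cached_then_copy(image_id, source, cache_dir, instance_dir):
    """Populate the image cache once, then copy the cached disk per instance."""
    cache_dir = Path(cache_dir)
    instance_dir = Path(instance_dir)
    cached = cache_dir / image_id / f"{image_id}.vmdk"

    with _image_locks[image_id]:
        cache_dir.mkdir(parents=True, exist_ok=True)   # "Creating directory ... devstack-image-cache_base"
        if not cached.exists():                        # SearchDatastore_Task equivalent
            cached.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy(source, cached)                # initial fetch into the cache, simplified

    instance_dir.mkdir(parents=True, exist_ok=True)
    target = instance_dir / f"{instance_dir.name}.vmdk"
    shutil.copy(cached, target)                        # CopyVirtualDisk_Task equivalent
    return target
```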
[ 732.384921] env[65503]: WARNING openstack [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 732.385279] env[65503]: WARNING openstack [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 732.542254] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449749, 'name': Rename_Task, 'duration_secs': 0.195972} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.545569] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 732.546133] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b82e1ce4-ec0b-4dfe-93dc-7c50a2451a8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.554781] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 732.554781] env[65503]: value = "task-4449750" [ 732.554781] env[65503]: _type = "Task" [ 732.554781] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.570619] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449750, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.602461] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5959c5c5-24a3-49a6-a6d0-adb610fc56a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.615154] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c891f8-aa62-45fd-adb7-b69345a7e9ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.655258] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5e9bc0-a447-460f-a6d6-5c2ec46a4c0d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.666547] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-229cbb2d-cc9e-4a6a-91fd-8ba962ab403d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.689208] env[65503]: DEBUG nova.compute.provider_tree [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.786758] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f68237-8e2b-4953-538b-7bab0324bdd4, 'name': SearchDatastore_Task, 'duration_secs': 0.010786} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.787362] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.787362] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51/9809fc8b-3842-4ce3-bb63-8ea37ee3bf51.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 732.787576] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-831d4359-54ec-4b74-9893-f16c1de865b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.797168] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 732.797168] env[65503]: value = "task-4449751" [ 732.797168] env[65503]: _type = "Task" [ 732.797168] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.798253] env[65503]: DEBUG nova.network.neutron [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 732.812779] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449751, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.071604] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449750, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.193937] env[65503]: DEBUG nova.scheduler.client.report [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 733.314136] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449751, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.571724] env[65503]: DEBUG oslo_vmware.api [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449750, 'name': PowerOnVM_Task, 'duration_secs': 0.654831} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.572281] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 733.572426] env[65503]: INFO nova.compute.manager [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Took 12.99 seconds to spawn the instance on the hypervisor. 
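Editor's note: the "Inventory has not changed for provider ... based on inventory data: {...}" lines reflect the report client comparing the locally computed inventory against what it last sent to Placement and skipping the update when nothing differs. A rough sketch of that compare-and-skip logic; the cache and the push callback here are assumptions for illustration, not the real scheduler report client:

```python
# Last inventory successfully reported per provider UUID (illustrative cache).
_reported = {}


def set_inventory_for_provider(provider_uuid, inventory, push_update):
    """Only push to Placement when the inventory actually differs."""
    if _reported.get(provider_uuid) == inventory:
        print(f"Inventory has not changed for provider {provider_uuid}")
        return False
    push_update(provider_uuid, inventory)
    _reported[provider_uuid] = dict(inventory)
    return True


if __name__ == "__main__":
    inv = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
    }
    send = lambda uuid, data: print(f"PUT inventory for {uuid}")
    set_inventory_for_provider("988ff85a-1d12-41bb-a369-e298e8491ca1", inv, send)  # pushed
    set_inventory_for_provider("988ff85a-1d12-41bb-a369-e298e8491ca1", inv, send)  # skipped
```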
[ 733.572505] env[65503]: DEBUG nova.compute.manager [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 733.573398] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3a34e1-1be6-49b7-8e38-e9b210aaab69 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.701325] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.701325] env[65503]: DEBUG nova.compute.manager [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 733.815347] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449751, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542354} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.815639] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51/9809fc8b-3842-4ce3-bb63-8ea37ee3bf51.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 733.815849] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 733.816134] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4fffa36-f401-4034-9f38-33519f122dca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.825732] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 733.825732] env[65503]: value = "task-4449752" [ 733.825732] env[65503]: _type = "Task" [ 733.825732] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.836975] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449752, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.091080] env[65503]: INFO nova.compute.manager [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Took 45.61 seconds to build instance. [ 734.140693] env[65503]: WARNING openstack [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 734.141388] env[65503]: WARNING openstack [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 734.208968] env[65503]: DEBUG nova.compute.utils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 734.209550] env[65503]: DEBUG nova.compute.manager [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 734.212588] env[65503]: DEBUG nova.network.neutron [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 734.212588] env[65503]: WARNING neutronclient.v2_0.client [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 734.212588] env[65503]: WARNING neutronclient.v2_0.client [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
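Editor's note: the "Using /dev/sd instead of None" entry comes from the compute utils falling back to a device-name prefix when none was requested and then picking the next free letter for the new block device mapping. A simplified, standalone approximation of that next-device-name selection, not the actual nova.compute.utils implementation:

```python
import string


def get_next_device_name(existing, prefix="/dev/sd"):
    """Return the first unused /dev/sdX name given already-assigned device names."""
    used = {name[-1] for name in existing if name and name.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in used:
            return prefix + letter
    raise ValueError("no free device names left")


if __name__ == "__main__":
    print(get_next_device_name([]))                        # /dev/sda
    print(get_next_device_name(["/dev/sda", "/dev/sdb"]))  # /dev/sdc
```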
[ 734.212588] env[65503]: WARNING openstack [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 734.212588] env[65503]: WARNING openstack [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 734.341237] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449752, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079096} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.341742] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 734.343213] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e71367-1100-4030-af04-00f38aeafbb2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.376028] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51/9809fc8b-3842-4ce3-bb63-8ea37ee3bf51.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 734.376028] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-169b3a31-c0fe-4b96-bf65-742e25925ec7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.393285] env[65503]: DEBUG nova.network.neutron [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Updating instance_info_cache with network_info: [{"id": "7dd31600-1222-4005-ad0f-74f4f93fcd4b", "address": "fa:16:3e:7e:79:ca", "network": {"id": "88e75d0a-b4c5-4ba5-bc83-f6baa008b75e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-195385216-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7b84f071d5d49168314c6baf24748a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89463199-7ddf-4ee7-b485-1629a75b4b8f", "external-id": "nsx-vlan-transportzone-302", "segmentation_id": 302, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dd31600-12", "ovs_interfaceid": "7dd31600-1222-4005-ad0f-74f4f93fcd4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 734.403615] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 734.403615] env[65503]: value = "task-4449753" [ 734.403615] env[65503]: _type = "Task" [ 734.403615] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.418043] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449753, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.571748] env[65503]: DEBUG nova.policy [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d9b7ad1c9a343e28e03a91c3463ec83', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86a9eddff6df487bb469066f7e1a0dde', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 734.595501] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3a9eebdd-50d0-4d0d-8890-bd0b9fbe1bd4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.121s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.719272] env[65503]: DEBUG nova.compute.manager [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 734.895986] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Releasing lock "refresh_cache-75578ccd-2b34-4948-9afa-ac94e9fd8b4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.896944] env[65503]: DEBUG nova.compute.manager [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Instance network_info: |[{"id": "7dd31600-1222-4005-ad0f-74f4f93fcd4b", "address": "fa:16:3e:7e:79:ca", "network": {"id": "88e75d0a-b4c5-4ba5-bc83-f6baa008b75e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-195385216-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7b84f071d5d49168314c6baf24748a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89463199-7ddf-4ee7-b485-1629a75b4b8f", "external-id": "nsx-vlan-transportzone-302", "segmentation_id": 302, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dd31600-12", "ovs_interfaceid": "7dd31600-1222-4005-ad0f-74f4f93fcd4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 734.896944] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:79:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89463199-7ddf-4ee7-b485-1629a75b4b8f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7dd31600-1222-4005-ad0f-74f4f93fcd4b', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.905189] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Creating folder: Project (d7b84f071d5d49168314c6baf24748a1). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.905604] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0f2da80-910c-4231-94f2-8e932bee015a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.918649] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449753, 'name': ReconfigVM_Task, 'duration_secs': 0.288766} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.918649] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51/9809fc8b-3842-4ce3-bb63-8ea37ee3bf51.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 734.919254] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-010817be-993b-43c8-bf95-cec1134afa4c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.922565] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Created folder: Project (d7b84f071d5d49168314c6baf24748a1) in parent group-v870190. [ 734.922751] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Creating folder: Instances. Parent ref: group-v870306. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.923427] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f5c3733-d938-44d3-ba11-b0937cb48d48 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.928132] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 734.928132] env[65503]: value = "task-4449755" [ 734.928132] env[65503]: _type = "Task" [ 734.928132] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.936162] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Created folder: Instances in parent group-v870306. [ 734.936432] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 734.939703] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 734.939969] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449755, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.940234] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6b23beae-159c-46b3-b9a7-371e20c55cc0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.963342] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.963342] env[65503]: value = "task-4449757" [ 734.963342] env[65503]: _type = "Task" [ 734.963342] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.973449] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449757, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.390128] env[65503]: WARNING neutronclient.v2_0.client [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 735.390937] env[65503]: WARNING openstack [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 735.391334] env[65503]: WARNING openstack [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 735.442620] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449755, 'name': Rename_Task, 'duration_secs': 0.206718} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.442963] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 735.443261] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71c287a5-98e7-44e7-9d2e-0a2b6fcbf04e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.453013] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 735.453013] env[65503]: value = "task-4449758" [ 735.453013] env[65503]: _type = "Task" [ 735.453013] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.464265] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449758, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.475092] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449757, 'name': CreateVM_Task, 'duration_secs': 0.410057} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.475589] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 735.476201] env[65503]: WARNING neutronclient.v2_0.client [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 735.476695] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.476989] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.477561] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 735.478072] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e62d05b-16b1-4da6-be3f-a674ad485acd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.486346] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for the task: (returnval){ [ 735.486346] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b1db10-f2c2-fef7-77c4-5c562ae35bbb" [ 735.486346] env[65503]: _type = "Task" [ 735.486346] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.504508] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b1db10-f2c2-fef7-77c4-5c562ae35bbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.733578] env[65503]: DEBUG nova.compute.manager [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 735.781057] env[65503]: DEBUG nova.virt.hardware [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 735.781057] env[65503]: DEBUG nova.virt.hardware [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 735.781057] env[65503]: DEBUG nova.virt.hardware [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 735.781057] env[65503]: DEBUG nova.virt.hardware [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 735.781057] env[65503]: DEBUG nova.virt.hardware [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 735.781057] env[65503]: DEBUG nova.virt.hardware [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 735.781732] env[65503]: DEBUG nova.virt.hardware [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 735.782143] env[65503]: DEBUG nova.virt.hardware [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 735.782620] env[65503]: DEBUG nova.virt.hardware [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 735.782897] env[65503]: DEBUG nova.virt.hardware [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 735.783183] env[65503]: DEBUG nova.virt.hardware [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 735.784206] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912a254f-fda9-4c69-8a1d-0833c9f9879a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.798668] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42480df1-4187-47b7-a3b5-4897b4a55e7d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.871374] env[65503]: DEBUG nova.network.neutron [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Successfully created port: 2da53c45-1677-47a0-99c7-20ed6ebfc0ad {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 735.968179] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449758, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.000524] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b1db10-f2c2-fef7-77c4-5c562ae35bbb, 'name': SearchDatastore_Task, 'duration_secs': 0.014142} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.001382] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.001382] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 736.002040] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.002215] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.002400] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 736.002708] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-816e4bb1-5e72-4e57-8fbd-971afc7f0407 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.015397] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 736.015600] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 736.017656] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e646697-ce38-41c0-9441-504c5bd9e2e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.027766] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for the task: (returnval){ [ 736.027766] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a21524-1267-046d-2c18-d7afa3383f3c" [ 736.027766] env[65503]: _type = "Task" [ 736.027766] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.041220] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a21524-1267-046d-2c18-d7afa3383f3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.471956] env[65503]: DEBUG oslo_vmware.api [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449758, 'name': PowerOnVM_Task, 'duration_secs': 0.759554} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.472384] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 736.472686] env[65503]: INFO nova.compute.manager [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Took 13.24 seconds to spawn the instance on the hypervisor. [ 736.472955] env[65503]: DEBUG nova.compute.manager [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 736.474837] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff9f193-dd91-4a99-bbfe-f87faa41907e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.547193] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a21524-1267-046d-2c18-d7afa3383f3c, 'name': SearchDatastore_Task, 'duration_secs': 0.013052} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.550246] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e731d85-8248-47de-8e38-f90515ba81a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.562340] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for the task: (returnval){ [ 736.562340] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5220e1c8-f4c3-13f5-4401-cfff0b98a4e7" [ 736.562340] env[65503]: _type = "Task" [ 736.562340] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.574840] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5220e1c8-f4c3-13f5-4401-cfff0b98a4e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.010525] env[65503]: DEBUG nova.network.neutron [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Updating instance_info_cache with network_info: [{"id": "f5e5c61e-0df6-47d9-ab15-21c80b68c833", "address": "fa:16:3e:bc:42:78", "network": {"id": "f74e7fa9-c4c6-43e3-bb39-f2f843b914b5", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1457325106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c024f22a228f4d2faa4b9316ca53a1ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5e5c61e-0d", "ovs_interfaceid": "f5e5c61e-0df6-47d9-ab15-21c80b68c833", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 737.011908] env[65503]: INFO nova.compute.manager [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Took 42.29 seconds to build instance. 
[ 737.075579] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5220e1c8-f4c3-13f5-4401-cfff0b98a4e7, 'name': SearchDatastore_Task, 'duration_secs': 0.0288} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.075579] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.075818] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 75578ccd-2b34-4948-9afa-ac94e9fd8b4b/75578ccd-2b34-4948-9afa-ac94e9fd8b4b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 737.076110] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-974c5557-5b1e-4b81-b512-97a2928861fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.084578] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for the task: (returnval){ [ 737.084578] env[65503]: value = "task-4449759" [ 737.084578] env[65503]: _type = "Task" [ 737.084578] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.095319] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449759, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.476026] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Acquiring lock "db942a2d-671b-4036-a80b-d2375145cd29" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.476270] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Lock "db942a2d-671b-4036-a80b-d2375145cd29" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.514811] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Releasing lock "refresh_cache-b6cda94b-2894-4cf0-8522-6593df9723bd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.515377] env[65503]: DEBUG nova.compute.manager [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Instance network_info: |[{"id": "f5e5c61e-0df6-47d9-ab15-21c80b68c833", "address": "fa:16:3e:bc:42:78", "network": {"id": "f74e7fa9-c4c6-43e3-bb39-f2f843b914b5", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1457325106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c024f22a228f4d2faa4b9316ca53a1ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5e5c61e-0d", "ovs_interfaceid": "f5e5c61e-0df6-47d9-ab15-21c80b68c833", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 737.515991] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03564b62-3dcd-4394-819d-4b8cbcc26605 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.804s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.516982] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None 
req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:42:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5e5c61e-0df6-47d9-ab15-21c80b68c833', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 737.529133] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 737.530364] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 737.530879] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-def117f0-ad35-4200-a06d-ea828abd4aef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.568707] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 737.568707] env[65503]: value = "task-4449760" [ 737.568707] env[65503]: _type = "Task" [ 737.568707] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.581446] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449760, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.596676] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449759, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.985405] env[65503]: DEBUG nova.compute.utils [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 738.080858] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449760, 'name': CreateVM_Task, 'duration_secs': 0.441262} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.081042] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 738.081688] env[65503]: WARNING neutronclient.v2_0.client [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 738.082068] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.085797] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.085797] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 738.085797] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8bafae0-b1ee-4433-b483-385a0a755c09 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.091730] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 738.091730] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521a97dd-e24c-195c-5375-4df0a02fedd8" [ 738.091730] env[65503]: _type = "Task" [ 738.091730] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.101116] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449759, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556944} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.101116] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 75578ccd-2b34-4948-9afa-ac94e9fd8b4b/75578ccd-2b34-4948-9afa-ac94e9fd8b4b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 738.101116] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 738.101481] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad89e15b-d103-4197-bd2e-e3f4c8417c66 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.109064] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521a97dd-e24c-195c-5375-4df0a02fedd8, 'name': SearchDatastore_Task, 'duration_secs': 0.011807} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.109868] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.110186] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 738.110494] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.110707] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.110977] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 738.111320] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36b0cc1d-bda3-483e-905e-328177b248ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.115345] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for the task: (returnval){ [ 738.115345] env[65503]: value = "task-4449761" [ 738.115345] env[65503]: _type = "Task" [ 738.115345] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.125046] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 738.125347] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 738.130483] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a9d8454-ad76-464a-b237-b4f686e3fb46 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.133323] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449761, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.137675] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 738.137675] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5223bf9d-c01f-386b-cdc7-da37181c37a4" [ 738.137675] env[65503]: _type = "Task" [ 738.137675] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.147083] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5223bf9d-c01f-386b-cdc7-da37181c37a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.491152] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Lock "db942a2d-671b-4036-a80b-d2375145cd29" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.014s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.553711] env[65503]: DEBUG nova.network.neutron [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Successfully updated port: 2da53c45-1677-47a0-99c7-20ed6ebfc0ad {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 738.628113] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449761, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.138262} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.629055] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 738.629883] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58674c01-f754-4be6-8123-2e2bff44ae11 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.654862] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 75578ccd-2b34-4948-9afa-ac94e9fd8b4b/75578ccd-2b34-4948-9afa-ac94e9fd8b4b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 738.658679] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8fbd747-aba9-4f00-b7e9-199dd18c5ae3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.682369] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5223bf9d-c01f-386b-cdc7-da37181c37a4, 'name': SearchDatastore_Task, 'duration_secs': 0.011257} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.685331] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for the task: (returnval){ [ 738.685331] env[65503]: value = "task-4449762" [ 738.685331] env[65503]: _type = "Task" [ 738.685331] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.685331] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ae636ac-96a5-4e3e-97aa-6a04c157be12 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.699478] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 738.699478] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5263a284-faf6-a2e5-e082-abe7dc90ef93" [ 738.699478] env[65503]: _type = "Task" [ 738.699478] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.706435] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.716350] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5263a284-faf6-a2e5-e082-abe7dc90ef93, 'name': SearchDatastore_Task, 'duration_secs': 0.015389} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.716639] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.716889] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] b6cda94b-2894-4cf0-8522-6593df9723bd/b6cda94b-2894-4cf0-8522-6593df9723bd.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 738.717179] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35edbbce-262a-4618-9608-b735244478fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.726281] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 738.726281] env[65503]: value = "task-4449763" [ 738.726281] env[65503]: _type = "Task" [ 738.726281] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.737622] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449763, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.056262] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquiring lock "refresh_cache-7ed036d1-8188-4aab-9d6d-8d7e46147812" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.057254] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquired lock "refresh_cache-7ed036d1-8188-4aab-9d6d-8d7e46147812" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.057254] env[65503]: DEBUG nova.network.neutron [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 739.205184] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.239152] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449763, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.561011] env[65503]: WARNING openstack [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 739.561011] env[65503]: WARNING openstack [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 739.636705] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Acquiring lock "db942a2d-671b-4036-a80b-d2375145cd29" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.637484] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Lock "db942a2d-671b-4036-a80b-d2375145cd29" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.639526] env[65503]: INFO nova.compute.manager [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Attaching volume b697b167-e710-40c5-8992-c508c36ac340 to /dev/sdb [ 739.678280] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205bfbc3-7360-40cc-b74a-ca5e4d9df517 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.686971] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c28f01e-b427-4f3b-9da7-b68afd6c3fc1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.701051] env[65503]: DEBUG nova.virt.block_device [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Updating existing volume attachment record: cf32660b-64ac-4e0d-9e9d-64ddd3a9b1c2 {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 739.709454] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449762, 'name': ReconfigVM_Task, 'duration_secs': 0.677346} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.709590] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 75578ccd-2b34-4948-9afa-ac94e9fd8b4b/75578ccd-2b34-4948-9afa-ac94e9fd8b4b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.710252] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7445c8ba-3340-4a47-8401-ee0341afa8e3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.720191] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for the task: (returnval){ [ 739.720191] env[65503]: value = "task-4449764" [ 739.720191] env[65503]: _type = "Task" [ 739.720191] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.728663] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449764, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.737542] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449763, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584389} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.737822] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] b6cda94b-2894-4cf0-8522-6593df9723bd/b6cda94b-2894-4cf0-8522-6593df9723bd.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 739.740617] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.740617] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9b461f9-93ec-4ced-af55-1a27eecdb9ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.748809] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 739.748809] env[65503]: value = "task-4449765" [ 739.748809] env[65503]: _type = "Task" [ 739.748809] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.759036] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449765, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.891236] env[65503]: DEBUG nova.network.neutron [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 740.205211] env[65503]: WARNING openstack [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 740.206070] env[65503]: WARNING openstack [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 740.232787] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449764, 'name': Rename_Task, 'duration_secs': 0.172354} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.233728] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 740.234221] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03de3df3-c543-4471-9b06-6f63ad89c7fb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.248965] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for the task: (returnval){ [ 740.248965] env[65503]: value = "task-4449769" [ 740.248965] env[65503]: _type = "Task" [ 740.248965] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.267931] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449765, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092038} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.268215] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449769, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.268475] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 740.269303] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daed1384-3722-45e2-9a42-895ef5471dca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.295151] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] b6cda94b-2894-4cf0-8522-6593df9723bd/b6cda94b-2894-4cf0-8522-6593df9723bd.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.295894] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-196d3446-ae67-4731-ab44-598f2656e4bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.317727] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 740.317727] env[65503]: value = "task-4449770" [ 740.317727] env[65503]: _type = "Task" [ 740.317727] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.327301] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449770, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.698320] env[65503]: WARNING neutronclient.v2_0.client [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
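The repeated "Waiting for the task ... / progress is N% / completed successfully" entries are produced by oslo.vmware's task-polling loop (wait_for_task / _poll_task in oslo_vmware/api.py). A minimal sketch of that pattern follows; the endpoint, credentials and the vm_ref managed-object reference are placeholders, not values from this run.

    from oslo_vmware import api


    def power_on(session, vm_ref):
        # *_Task SOAP methods return a task moref; wait_for_task() polls it
        # until SUCCESS (the "progress is N% ... completed successfully"
        # sequence) or raises if the task ends in error.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)


    # Positional args: host, username, password, API retry count, task poll
    # interval in seconds. By default, constructing the session attempts to
    # log in to vCenter.
    session = api.VMwareAPISession(
        'vcenter.example.test', 'administrator', 'secret', 10, 0.5)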
[ 740.698320] env[65503]: WARNING openstack [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 740.698320] env[65503]: WARNING openstack [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 740.762078] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449769, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.802208] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Received event network-changed-6410d13c-8f5f-4943-818b-69c48368b69e {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 740.802508] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Refreshing instance network info cache due to event network-changed-6410d13c-8f5f-4943-818b-69c48368b69e. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 740.802859] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquiring lock "refresh_cache-9acbc312-a3a2-4758-87cd-5576c4f1f8dc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.803928] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquired lock "refresh_cache-9acbc312-a3a2-4758-87cd-5576c4f1f8dc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.803928] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Refreshing network info cache for port 6410d13c-8f5f-4943-818b-69c48368b69e {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 740.831180] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449770, 'name': ReconfigVM_Task, 'duration_secs': 0.500405} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.831342] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Reconfigured VM instance instance-00000028 to attach disk [datastore1] b6cda94b-2894-4cf0-8522-6593df9723bd/b6cda94b-2894-4cf0-8522-6593df9723bd.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 740.831640] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=65503) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 740.832473] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-74e93bbc-6625-4d52-965e-cfc3d364889b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.842345] env[65503]: DEBUG nova.network.neutron [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Updating instance_info_cache with network_info: [{"id": "2da53c45-1677-47a0-99c7-20ed6ebfc0ad", "address": "fa:16:3e:a1:e4:4b", "network": {"id": "7d2c1d69-8f8c-4413-98d4-7389dd23bffa", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-2110345902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "86a9eddff6df487bb469066f7e1a0dde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2da53c45-16", "ovs_interfaceid": "2da53c45-1677-47a0-99c7-20ed6ebfc0ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 740.846651] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 740.846651] env[65503]: value = "task-4449771" [ 740.846651] env[65503]: _type = "Task" [ 740.846651] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.859662] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449771, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.266079] env[65503]: DEBUG oslo_vmware.api [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449769, 'name': PowerOnVM_Task, 'duration_secs': 0.698507} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.266438] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 741.266578] env[65503]: INFO nova.compute.manager [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Took 15.32 seconds to spawn the instance on the hypervisor. [ 741.266757] env[65503]: DEBUG nova.compute.manager [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 741.267651] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef27739-dbdc-4015-824d-b96ab7154bb6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.307455] env[65503]: WARNING neutronclient.v2_0.client [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
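The recurring "Disabling service 'block-storage' ... no such option valid_interfaces in group [cinder]" warnings come from the SDK's config processing hitting oslo.config's NoSuchOptError for an option that was never registered under that group. A self-contained reproduction of just that error class is sketched below; it mirrors the logged message but is not the SDK's actual code path.

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))

    # 'valid_interfaces' is never registered under [cinder], so reading it
    # raises the exception quoted in the warnings above.
    try:
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        print(exc)  # no such option valid_interfaces in group [cinder]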
[ 741.312021] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 741.312021] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 741.343795] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Releasing lock "refresh_cache-7ed036d1-8188-4aab-9d6d-8d7e46147812" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.344704] env[65503]: DEBUG nova.compute.manager [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Instance network_info: |[{"id": "2da53c45-1677-47a0-99c7-20ed6ebfc0ad", "address": "fa:16:3e:a1:e4:4b", "network": {"id": "7d2c1d69-8f8c-4413-98d4-7389dd23bffa", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-2110345902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "86a9eddff6df487bb469066f7e1a0dde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2da53c45-16", "ovs_interfaceid": "2da53c45-1677-47a0-99c7-20ed6ebfc0ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 741.346300] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:e4:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e51ebca-e0f8-4b77-b155-4ff928eef130', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2da53c45-1677-47a0-99c7-20ed6ebfc0ad', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 741.355710] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 
tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Creating folder: Project (86a9eddff6df487bb469066f7e1a0dde). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 741.358664] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5e67393-564c-4ce9-9048-c8080f0e1a4b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.375124] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449771, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.065802} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.375124] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=65503) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 741.375124] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084a2a98-9b7b-4507-9359-cf294d33ffed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.409633] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] b6cda94b-2894-4cf0-8522-6593df9723bd/ephemeral_0.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.412952] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70933192-002d-45e0-a413-2f7eb917b440 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.430193] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Created folder: Project (86a9eddff6df487bb469066f7e1a0dde) in parent group-v870190. [ 741.430309] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Creating folder: Instances. Parent ref: group-v870313. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 741.432206] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab16a34a-79e6-4371-803e-3898783f8cc6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.441542] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 741.441542] env[65503]: value = "task-4449774" [ 741.441542] env[65503]: _type = "Task" [ 741.441542] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.448696] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Created folder: Instances in parent group-v870313. [ 741.448859] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 741.449725] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 741.450099] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc2a5356-873b-4536-87e2-f77c3fc6bc9d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.472214] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449774, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.481407] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 741.481407] env[65503]: value = "task-4449775" [ 741.481407] env[65503]: _type = "Task" [ 741.481407] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.489706] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449775, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.565380] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquiring lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.565607] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.719098] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 741.719098] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 741.800818] env[65503]: INFO nova.compute.manager [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Took 34.29 seconds to build instance. [ 741.929766] env[65503]: WARNING neutronclient.v2_0.client [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
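The 'Acquiring lock ... by ...', 'acquired ... waited N s' and 'released ... held N s' triplets come from oslo.concurrency's synchronized wrapper (the inner() frames in lockutils.py cited in these entries), while the 'Acquiring/Acquired/Releasing lock "refresh_cache-..."' entries come from the lock() context manager in the same module. A minimal sketch of both forms, with made-up lock names standing in for the instance UUIDs:

    from oslo_concurrency import lockutils


    @lockutils.synchronized('example-instance-uuid')
    def locked_do_build_and_run_instance():
        # Only one caller per process holds 'example-instance-uuid' at a
        # time; the decorator emits the acquire/waited and release/held
        # debug lines.
        pass


    # Context-manager form, for finer-grained critical sections.
    with lockutils.lock('refresh_cache-example-instance-uuid'):
        pass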
[ 741.930607] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 741.931036] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 741.953682] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449774, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.003876] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449775, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.055712] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Updated VIF entry in instance network info cache for port 6410d13c-8f5f-4943-818b-69c48368b69e. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 742.056430] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Updating instance_info_cache with network_info: [{"id": "6410d13c-8f5f-4943-818b-69c48368b69e", "address": "fa:16:3e:3e:09:5c", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6410d13c-8f", "ovs_interfaceid": "6410d13c-8f5f-4943-818b-69c48368b69e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 742.069584] env[65503]: DEBUG nova.compute.manager [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] 
[instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 742.202551] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.202877] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.206439] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.206439] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.206439] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.206439] env[65503]: INFO nova.compute.manager [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Terminating instance [ 742.313738] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e5db912-b418-48fc-a1df-b1ce2fe52e95 tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.819s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.314664] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "eec6a484-ab00-402e-a369-c3009065c553" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.314886] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "eec6a484-ab00-402e-a369-c3009065c553" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.315109] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "eec6a484-ab00-402e-a369-c3009065c553-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.315305] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "eec6a484-ab00-402e-a369-c3009065c553-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.315498] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "eec6a484-ab00-402e-a369-c3009065c553-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.317881] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.317881] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.319267] env[65503]: INFO nova.compute.manager [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Terminating instance [ 742.452976] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449774, 'name': 
ReconfigVM_Task, 'duration_secs': 0.698231} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.453614] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Reconfigured VM instance instance-00000028 to attach disk [datastore1] b6cda94b-2894-4cf0-8522-6593df9723bd/ephemeral_0.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 742.454339] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9da85193-38ef-4bf1-9e10-73f93931c1e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.462835] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 742.462835] env[65503]: value = "task-4449776" [ 742.462835] env[65503]: _type = "Task" [ 742.462835] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.474508] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449776, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.491835] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449775, 'name': CreateVM_Task, 'duration_secs': 0.964977} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.492070] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 742.492704] env[65503]: WARNING neutronclient.v2_0.client [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
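The instance_info_cache entries above carry the Neutron port data as a list of VIF dictionaries (port id, MAC address, and a nested network/subnets/ips structure). A short, standalone sketch of reading the usual fields out of one such entry; the sample dict is abbreviated and uses placeholder values rather than data from this run.

    # Abbreviated VIF entry in the shape logged by
    # update_instance_cache_with_nw_info (placeholder values).
    vif = {
        "id": "00000000-0000-0000-0000-000000000000",
        "address": "fa:16:3e:00:00:01",
        "type": "ovs",
        "devname": "tap00000000-00",
        "network": {
            "label": "example-network",
            "subnets": [
                {"cidr": "192.0.2.0/24",
                 "ips": [{"address": "192.0.2.5", "type": "fixed"}]},
            ],
        },
    }

    # Collect the fixed IPs across all subnets of this VIF.
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips)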
[ 742.493085] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.493285] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.493603] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 742.493970] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ac8aa40-8233-45e7-a705-5b5ca242283f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.500167] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for the task: (returnval){ [ 742.500167] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5234b590-24e4-04c3-b97a-4da009e72a82" [ 742.500167] env[65503]: _type = "Task" [ 742.500167] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.513101] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5234b590-24e4-04c3-b97a-4da009e72a82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.559979] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Releasing lock "refresh_cache-9acbc312-a3a2-4758-87cd-5576c4f1f8dc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.560304] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Received event network-changed-7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 742.560468] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Refreshing instance network info cache due to event network-changed-7dcf2e2a-4e77-459c-9936-568c34f49a33. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 742.560680] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquiring lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.560814] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquired lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.560973] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Refreshing network info cache for port 7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 742.597624] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.597896] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.599454] env[65503]: INFO nova.compute.claims [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.662572] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.663880] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.713032] env[65503]: DEBUG nova.compute.manager [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 742.713032] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.713032] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919db0f1-da36-49ba-a54c-e0bd200af195 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.722928] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 742.723744] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4397400d-3959-4bc9-ba04-5a11260c1c5d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.733231] env[65503]: DEBUG oslo_vmware.api [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 742.733231] env[65503]: value = "task-4449777" [ 742.733231] env[65503]: _type = "Task" [ 742.733231] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.749850] env[65503]: DEBUG oslo_vmware.api [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449777, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.823418] env[65503]: DEBUG nova.compute.manager [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 742.828784] env[65503]: DEBUG nova.compute.manager [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 742.829071] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.830732] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3acf8523-ecea-4bc3-a45e-49e48831930b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.840406] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 742.840406] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47c8e555-f568-4c3f-a853-0fe7b7346cc4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.847483] env[65503]: DEBUG oslo_vmware.api [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 742.847483] env[65503]: value = "task-4449778" [ 742.847483] env[65503]: _type = "Task" [ 742.847483] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.858328] env[65503]: DEBUG oslo_vmware.api [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449778, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.973999] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449776, 'name': Rename_Task, 'duration_secs': 0.347582} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.974368] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 742.974634] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a02e609-7708-4784-8023-1509be0aa61a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.983273] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 742.983273] env[65503]: value = "task-4449779" [ 742.983273] env[65503]: _type = "Task" [ 742.983273] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.994870] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449779, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.013059] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5234b590-24e4-04c3-b97a-4da009e72a82, 'name': SearchDatastore_Task, 'duration_secs': 0.014836} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.013059] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.013059] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.013059] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.013306] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.013466] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.013758] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55c1197b-91ca-4585-bcf5-c934b4b507ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.035414] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.035650] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 743.037035] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83e786ed-f13f-4b3a-a3e5-715731c8d4f8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.043936] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for the task: (returnval){ [ 743.043936] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521e873a-fefa-3bb5-1089-9379d9e77af3" [ 743.043936] env[65503]: _type = "Task" [ 743.043936] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.057962] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521e873a-fefa-3bb5-1089-9379d9e77af3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.066487] env[65503]: WARNING neutronclient.v2_0.client [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 743.066659] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 743.067061] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 743.167630] env[65503]: DEBUG nova.compute.manager [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 743.249234] env[65503]: DEBUG oslo_vmware.api [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449777, 'name': PowerOffVM_Task, 'duration_secs': 0.271351} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.249560] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 743.249849] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 743.250488] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-499c893b-b69c-4f8c-870b-6483a7764c33 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.289694] env[65503]: DEBUG nova.compute.manager [req-be4e9e31-53f3-40a0-9ff4-25b34f633873 req-8019625b-7522-4fd2-bd06-1b3a6ca52772 service nova] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Received event network-vif-plugged-2da53c45-1677-47a0-99c7-20ed6ebfc0ad {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 743.290021] env[65503]: DEBUG oslo_concurrency.lockutils [req-be4e9e31-53f3-40a0-9ff4-25b34f633873 req-8019625b-7522-4fd2-bd06-1b3a6ca52772 service nova] Acquiring lock "7ed036d1-8188-4aab-9d6d-8d7e46147812-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.290140] env[65503]: DEBUG oslo_concurrency.lockutils [req-be4e9e31-53f3-40a0-9ff4-25b34f633873 req-8019625b-7522-4fd2-bd06-1b3a6ca52772 service nova] Lock "7ed036d1-8188-4aab-9d6d-8d7e46147812-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.290863] env[65503]: DEBUG oslo_concurrency.lockutils [req-be4e9e31-53f3-40a0-9ff4-25b34f633873 req-8019625b-7522-4fd2-bd06-1b3a6ca52772 service nova] Lock "7ed036d1-8188-4aab-9d6d-8d7e46147812-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.290863] env[65503]: DEBUG nova.compute.manager [req-be4e9e31-53f3-40a0-9ff4-25b34f633873 req-8019625b-7522-4fd2-bd06-1b3a6ca52772 service nova] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] No waiting events found dispatching network-vif-plugged-2da53c45-1677-47a0-99c7-20ed6ebfc0ad {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 743.290863] env[65503]: WARNING nova.compute.manager [req-be4e9e31-53f3-40a0-9ff4-25b34f633873 req-8019625b-7522-4fd2-bd06-1b3a6ca52772 service nova] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Received unexpected event network-vif-plugged-2da53c45-1677-47a0-99c7-20ed6ebfc0ad for instance with vm_state building and task_state spawning. 
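Annotation: the records above show the vmwareapi driver powering off instances by invoking `VirtualMachine.PowerOffVM_Task` and then polling the returned tasks (`task-4449777`, `task-4449778`) until vCenter reports completion. Below is a minimal sketch of that invoke-then-wait pattern with oslo.vmware; the vCenter host, credentials and the VM managed-object reference are placeholders, and this illustrates the library pattern rather than Nova's actual `vm_util.power_off_instance` code.

```python
# Illustrative "invoke task, then poll it" pattern (PowerOffVM_Task + wait_for_task).
# Host, credentials and the MoRef value are placeholders, not from this environment.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    host="vcenter.example.org",          # placeholder vCenter endpoint
    server_username="administrator",     # placeholder credentials
    server_password="secret",
    api_retry_count=3,
    task_poll_interval=0.5)

# Build a managed object reference for the VM (Nova tracks this mapping itself).
vm_ref = vim_util.get_moref("vm-12345", "VirtualMachine")  # placeholder MoRef

# Start the power-off task and block until it finishes, as the log's
# "Waiting for the task ... PowerOffVM_Task ... completed successfully" lines show.
task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
session.wait_for_task(task)
```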
[ 743.324817] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 743.325123] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 743.325734] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleting the datastore file [datastore2] 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 743.326096] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd372679-6246-48c5-89e3-61936baef430 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.355822] env[65503]: DEBUG oslo_vmware.api [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 743.355822] env[65503]: value = "task-4449781" [ 743.355822] env[65503]: _type = "Task" [ 743.355822] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.363974] env[65503]: DEBUG oslo_vmware.api [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449778, 'name': PowerOffVM_Task, 'duration_secs': 0.24343} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.364831] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 743.365106] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 743.365557] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d24ac5af-1609-46bb-8040-1dc05714bc38 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.371807] env[65503]: DEBUG oslo_vmware.api [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449781, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.375151] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.395073] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquiring lock "5cefb589-9947-4fc1-89b4-d888f8c8f644" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.396466] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Lock "5cefb589-9947-4fc1-89b4-d888f8c8f644" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.450171] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 743.450493] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 743.450766] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Deleting the datastore file [datastore2] eec6a484-ab00-402e-a369-c3009065c553 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 743.451132] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cc72672-6a1e-4a0f-ae73-f8e7f15ac356 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.461098] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 743.461499] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no 
such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 743.471457] env[65503]: DEBUG oslo_vmware.api [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for the task: (returnval){ [ 743.471457] env[65503]: value = "task-4449783" [ 743.471457] env[65503]: _type = "Task" [ 743.471457] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.482152] env[65503]: DEBUG oslo_vmware.api [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449783, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.495751] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449779, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.555895] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521e873a-fefa-3bb5-1089-9379d9e77af3, 'name': SearchDatastore_Task, 'duration_secs': 0.014258} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.556772] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c72cc34c-0811-43d6-8e01-20d0a0697667 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.563593] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for the task: (returnval){ [ 743.563593] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5220722d-b5c2-9245-ebaa-d1fb3d39855d" [ 743.563593] env[65503]: _type = "Task" [ 743.563593] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.579615] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5220722d-b5c2-9245-ebaa-d1fb3d39855d, 'name': SearchDatastore_Task, 'duration_secs': 0.01258} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.579896] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.580166] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 7ed036d1-8188-4aab-9d6d-8d7e46147812/7ed036d1-8188-4aab-9d6d-8d7e46147812.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 743.580466] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-115a72d5-e451-4f19-942e-ebb5acdd3967 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.593676] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for the task: (returnval){ [ 743.593676] env[65503]: value = "task-4449784" [ 743.593676] env[65503]: _type = "Task" [ 743.593676] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.604947] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449784, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.671020] env[65503]: WARNING neutronclient.v2_0.client [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
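Annotation: the image-cache locks acquired and released above ("[datastore1] devstack-image-cache_base/<image-id>" and the matching ".vmdk" path) serialize the check-then-copy of a shared cached image across concurrent spawns. A minimal, self-contained sketch of that named-lock pattern with oslo.concurrency follows; the helper functions are stand-ins for the real datastore-browser search and image fetch, not `vmops._fetch_image_if_missing` itself.

```python
# Serialize "does the cached image exist? if not, fetch it" per image path so
# concurrent spawns of the same image do not race. Helpers are placeholders.
from oslo_concurrency import lockutils

_cache = set()  # stand-in for the datastore contents


def _image_present(cache_path):
    return cache_path in _cache        # real driver: SearchDatastore_Task


def _fetch_image_to_cache(cache_path):
    _cache.add(cache_path)             # real driver: download/copy the VMDK


def ensure_image_cached(image_id):
    # Lock name mirrors the log: "[datastore1] devstack-image-cache_base/<id>"
    cache_path = "[datastore1] devstack-image-cache_base/%s" % image_id
    with lockutils.lock(cache_path):
        if not _image_present(cache_path):
            _fetch_image_to_cache(cache_path)
        return cache_path
```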
[ 743.671071] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 743.672518] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 743.713338] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.868998] env[65503]: DEBUG oslo_vmware.api [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240034} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.869431] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 743.869541] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 743.869741] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 743.870240] env[65503]: INFO nova.compute.manager [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Took 1.16 seconds to destroy the instance on the hypervisor. [ 743.870240] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 743.870482] env[65503]: DEBUG nova.compute.manager [-] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 743.870591] env[65503]: DEBUG nova.network.neutron [-] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 743.870858] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 743.871586] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 743.871868] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 743.898413] env[65503]: DEBUG nova.compute.manager [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 743.917988] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updated VIF entry in instance network info cache for port 7dcf2e2a-4e77-459c-9936-568c34f49a33. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 743.918560] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updating instance_info_cache with network_info: [{"id": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "address": "fa:16:3e:4a:dc:36", "network": {"id": "d86429bf-deca-464e-a1ca-79eafee9ebd1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-267775540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf44cd2ee0e4906bcdc3d16dfe7c838", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcf2e2a-4e", "ovs_interfaceid": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 743.994500] env[65503]: DEBUG oslo_vmware.api [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Task: {'id': task-4449783, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207537} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.997416] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 743.997416] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 743.997416] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 743.997416] env[65503]: INFO nova.compute.manager [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] [instance: eec6a484-ab00-402e-a369-c3009065c553] Took 1.17 seconds to destroy the instance on the hypervisor. 
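Annotation: the VIF entry cached above for port 7dcf2e2a-4e77-459c-9936-568c34f49a33 shows the shape of a `network_info` element (port id, MAC, subnets with fixed IPs and attached floating IPs). The sketch below walks a trimmed copy of exactly that structure to pull out the addresses; only fields visible in the log are relied on.

```python
# Summarize a cached network_info entry (structure copied, trimmed, from the log).
vif = {
    "id": "7dcf2e2a-4e77-459c-9936-568c34f49a33",
    "address": "fa:16:3e:4a:dc:36",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.14",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.182",
                                  "type": "floating"}],
            }],
        }],
    },
}


def summarize_vif(vif):
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return {"port_id": vif["id"], "mac": vif["address"],
            "fixed_ips": fixed, "floating_ips": floating}


print(summarize_vif(vif))
# {'port_id': '7dcf2e2a-...', 'mac': 'fa:16:3e:4a:dc:36',
#  'fixed_ips': ['192.168.128.14'], 'floating_ips': ['10.180.180.182']}
```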
[ 743.997416] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 743.997416] env[65503]: DEBUG nova.compute.manager [-] [instance: eec6a484-ab00-402e-a369-c3009065c553] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 743.997416] env[65503]: DEBUG nova.network.neutron [-] [instance: eec6a484-ab00-402e-a369-c3009065c553] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 743.997416] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 743.997888] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 743.997888] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 744.012971] env[65503]: DEBUG oslo_vmware.api [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4449779, 'name': PowerOnVM_Task, 'duration_secs': 0.772011} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.016968] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 744.017274] env[65503]: INFO nova.compute.manager [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Took 15.97 seconds to spawn the instance on the hypervisor. 
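Annotation: the "Waiting for function ... _deallocate_network_with_retries to return" lines above come from an oslo.service looping call that keeps retrying network deallocation until it succeeds. A hedged sketch of that general looping-call pattern is below; the deallocate function here is a stand-in that fakes two failures, not Nova's implementation.

```python
# Retry a flaky call on a fixed interval until it succeeds, then hand the
# result back to the waiter. Stand-in logic only; real deallocation omitted.
from oslo_service import loopingcall

attempts = {"n": 0}


def _deallocate_network_with_retries():
    attempts["n"] += 1
    if attempts["n"] < 3:          # pretend the first two calls fail
        return                     # loop again on the next interval
    # Success: stop the loop; wait() below returns retvalue.
    raise loopingcall.LoopingCallDone(retvalue="deallocated")


timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
result = timer.start(interval=0.1).wait()
print(result)  # "deallocated"
```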
[ 744.017423] env[65503]: DEBUG nova.compute.manager [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 744.017741] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.017939] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.020695] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b786d495-2f49-46bd-9825-e630220fd8aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.103983] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 744.110547] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449784, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.216170] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
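Annotation: the per-instance build locks and the "compute_resources" lock acquired by `ResourceTracker.instance_claim` throughout these records are the decorator form of the same oslo.concurrency primitive used for the image cache above. A minimal sketch, with the claim bookkeeping reduced to a placeholder counter:

```python
# Only one thread at a time may update the tracked totals, matching the
# 'Lock "compute_resources" acquired by ... instance_claim' lines in the log.
from oslo_concurrency import lockutils

_claimed_mb = 0


@lockutils.synchronized("compute_resources")
def instance_claim(memory_mb):
    global _claimed_mb
    _claimed_mb += memory_mb       # placeholder for the real resource claim
    return _claimed_mb


print(instance_claim(2048))
```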
[ 744.335432] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048a5c6f-8317-4a62-a88b-586f4b2c09fb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.345626] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1c9c51-d62e-4433-9a3e-395dbf7b8bcc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.384136] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3109f356-4729-4b94-8d08-7c8d122eca60 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.394066] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5968ca2b-4724-41ef-a80b-07dae1337e36 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.412421] env[65503]: DEBUG nova.compute.provider_tree [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.422374] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Releasing lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.422578] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Received event network-vif-plugged-633bd812-c51f-4ae0-bab2-ced08b56a04b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 744.422725] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquiring lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.422918] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.423044] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.423298] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 
9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] No waiting events found dispatching network-vif-plugged-633bd812-c51f-4ae0-bab2-ced08b56a04b {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 744.423503] env[65503]: WARNING nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Received unexpected event network-vif-plugged-633bd812-c51f-4ae0-bab2-ced08b56a04b for instance with vm_state active and task_state None. [ 744.423695] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Received event network-changed-633bd812-c51f-4ae0-bab2-ced08b56a04b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 744.423838] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Refreshing instance network info cache due to event network-changed-633bd812-c51f-4ae0-bab2-ced08b56a04b. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 744.424049] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquiring lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.424192] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquired lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.424344] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Refreshing network info cache for port 633bd812-c51f-4ae0-bab2-ced08b56a04b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 744.440141] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.529024] env[65503]: DEBUG nova.compute.manager [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 744.548397] env[65503]: INFO nova.compute.manager [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Took 34.03 seconds to build instance. 
[ 744.607926] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449784, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626129} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.608936] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 7ed036d1-8188-4aab-9d6d-8d7e46147812/7ed036d1-8188-4aab-9d6d-8d7e46147812.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 744.609183] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 744.610172] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e18ef85-b657-4eed-afcf-8fbcfb4d88f3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.621182] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for the task: (returnval){ [ 744.621182] env[65503]: value = "task-4449786" [ 744.621182] env[65503]: _type = "Task" [ 744.621182] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.633077] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449786, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.917608] env[65503]: DEBUG nova.scheduler.client.report [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 744.931016] env[65503]: WARNING neutronclient.v2_0.client [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
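Annotation: the inventory record above (VCPU total 48, allocation_ratio 4.0; MEMORY_MB total 196590, reserved 512; DISK_GB total 200) maps to schedulable capacity via the standard Placement rule: capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation. The short calculation below reproduces that arithmetic for the values shown in the log.

```python
# Capacity per resource class from the inventory reported in the log.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200
```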
[ 744.931016] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 744.931016] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 744.998800] env[65503]: DEBUG nova.network.neutron [-] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 745.050390] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5bc49589-d592-4d2b-8843-349b255cacbb tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "b6cda94b-2894-4cf0-8522-6593df9723bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.541s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.064774] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.133855] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449786, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069157} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.134158] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.135060] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef59142-c6e9-42a9-914c-80ab7a51fae6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.164748] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 7ed036d1-8188-4aab-9d6d-8d7e46147812/7ed036d1-8188-4aab-9d6d-8d7e46147812.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.169630] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07314e02-915c-40df-bd98-f6ddf4ccba60 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.185644] env[65503]: DEBUG nova.network.neutron [-] [instance: eec6a484-ab00-402e-a369-c3009065c553] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 745.192872] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for the task: (returnval){ [ 745.192872] env[65503]: value = "task-4449787" [ 745.192872] env[65503]: _type = "Task" [ 745.192872] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.202081] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449787, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.255151] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 745.255569] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 745.398361] env[65503]: WARNING neutronclient.v2_0.client [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 745.398361] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 745.398361] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 745.424781] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.827s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.426118] env[65503]: DEBUG nova.compute.manager [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 745.434688] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.059s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.436946] env[65503]: INFO nova.compute.claims [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 745.501346] env[65503]: INFO nova.compute.manager [-] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Took 1.63 seconds to deallocate network for instance. [ 745.607351] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updated VIF entry in instance network info cache for port 633bd812-c51f-4ae0-bab2-ced08b56a04b. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 745.608877] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance_info_cache with network_info: [{"id": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "address": "fa:16:3e:b6:69:8c", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633bd812-c5", "ovs_interfaceid": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 745.691856] env[65503]: INFO nova.compute.manager [-] [instance: eec6a484-ab00-402e-a369-c3009065c553] Took 1.69 seconds to deallocate network for instance. [ 745.714561] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449787, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.936953] env[65503]: DEBUG nova.compute.utils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 745.938815] env[65503]: DEBUG nova.compute.manager [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 745.939194] env[65503]: DEBUG nova.network.neutron [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 745.941497] env[65503]: WARNING neutronclient.v2_0.client [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 745.942492] env[65503]: WARNING neutronclient.v2_0.client [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 745.943343] env[65503]: WARNING openstack [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 745.943805] env[65503]: WARNING openstack [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 745.981022] env[65503]: DEBUG oslo_concurrency.lockutils [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.981022] env[65503]: DEBUG oslo_concurrency.lockutils [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.011029] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.089187] env[65503]: DEBUG nova.policy [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '24818b2f0a334c68923ff0751831de67', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07d60cd1e83c4ed5b6f5316646dda385', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 746.117073] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Releasing lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.117073] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Received event 
network-vif-plugged-7dd31600-1222-4005-ad0f-74f4f93fcd4b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 746.117073] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquiring lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.117073] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.117073] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.117073] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] No waiting events found dispatching network-vif-plugged-7dd31600-1222-4005-ad0f-74f4f93fcd4b {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 746.117073] env[65503]: WARNING nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Received unexpected event network-vif-plugged-7dd31600-1222-4005-ad0f-74f4f93fcd4b for instance with vm_state building and task_state spawning. [ 746.117073] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Received event network-changed-7dd31600-1222-4005-ad0f-74f4f93fcd4b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 746.117073] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Refreshing instance network info cache due to event network-changed-7dd31600-1222-4005-ad0f-74f4f93fcd4b. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 746.117073] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquiring lock "refresh_cache-75578ccd-2b34-4948-9afa-ac94e9fd8b4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.117073] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquired lock "refresh_cache-75578ccd-2b34-4948-9afa-ac94e9fd8b4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.117073] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Refreshing network info cache for port 7dd31600-1222-4005-ad0f-74f4f93fcd4b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 746.206860] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.213195] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449787, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.291259] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 746.291259] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870312', 'volume_id': 'b697b167-e710-40c5-8992-c508c36ac340', 'name': 'volume-b697b167-e710-40c5-8992-c508c36ac340', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db942a2d-671b-4036-a80b-d2375145cd29', 'attached_at': '', 'detached_at': '', 'volume_id': 'b697b167-e710-40c5-8992-c508c36ac340', 'serial': 'b697b167-e710-40c5-8992-c508c36ac340'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 746.292100] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f00644-b5df-4e13-9b1b-6644afee19b8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.321324] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1aa0a9-39ac-428c-a08b-f5da52b4b98a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.350727] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] volume-b697b167-e710-40c5-8992-c508c36ac340/volume-b697b167-e710-40c5-8992-c508c36ac340.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 746.351381] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef0ca718-f1cd-41fe-8bc0-41cce972ee9a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.371724] env[65503]: DEBUG oslo_vmware.api [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Waiting for the task: (returnval){ [ 746.371724] env[65503]: value = "task-4449788" [ 746.371724] env[65503]: _type = "Task" [ 746.371724] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.381369] env[65503]: DEBUG oslo_vmware.api [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Task: {'id': task-4449788, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.402487] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.403059] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.455270] env[65503]: DEBUG nova.compute.manager [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 746.485244] env[65503]: DEBUG nova.compute.utils [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 746.612701] env[65503]: DEBUG nova.network.neutron [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Successfully created port: 49fc5f6e-b681-4971-88d0-255d42df1686 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 746.620508] env[65503]: WARNING neutronclient.v2_0.client [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 746.621558] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 746.621639] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 746.716078] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449787, 'name': ReconfigVM_Task, 'duration_secs': 1.053839} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.716078] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 7ed036d1-8188-4aab-9d6d-8d7e46147812/7ed036d1-8188-4aab-9d6d-8d7e46147812.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.716078] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ca983ee-924f-4b80-ace9-6c7b777e4e8f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.726087] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for the task: (returnval){ [ 746.726087] env[65503]: value = "task-4449789" [ 746.726087] env[65503]: _type = "Task" [ 746.726087] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.737295] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449789, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.813294] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.813593] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.884564] env[65503]: DEBUG oslo_vmware.api [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Task: {'id': task-4449788, 'name': ReconfigVM_Task, 'duration_secs': 0.406448} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.885048] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Reconfigured VM instance instance-0000000a to attach disk [datastore2] volume-b697b167-e710-40c5-8992-c508c36ac340/volume-b697b167-e710-40c5-8992-c508c36ac340.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.889553] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e5a7a4b-3098-4677-9850-e4aaef6f4bc4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.905212] env[65503]: DEBUG nova.compute.manager [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 746.909614] env[65503]: DEBUG oslo_vmware.api [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Waiting for the task: (returnval){ [ 746.909614] env[65503]: value = "task-4449790" [ 746.909614] env[65503]: _type = "Task" [ 746.909614] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.921127] env[65503]: DEBUG oslo_vmware.api [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Task: {'id': task-4449790, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.951104] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 746.951634] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 746.989830] env[65503]: DEBUG oslo_concurrency.lockutils [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.012021] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aede66c-ea52-472b-b7b0-3c6760b27770 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.020751] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f13ac6-ce0c-429d-9741-f51a19004819 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.059848] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a132556e-49df-466d-aa17-152b040901ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.069623] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a1cc61-fcbb-448a-a580-499f3f526101 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.086039] env[65503]: DEBUG nova.compute.provider_tree [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.221994] env[65503]: WARNING neutronclient.v2_0.client [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
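The recurring "Disabling service 'block-storage'/'key-manager'" warnings above come from openstacksdk hitting oslo.config's NoSuchOptError while reading an option that was never registered for the [cinder] and [barbican] groups in this process. A minimal, self-contained sketch of that failure mode follows; the group and option names mirror the log text, but they are registered by hand here rather than taken from Nova's real option wiring:

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))
conf([])  # initialize with no config files or CLI arguments

try:
    # 'valid_interfaces' was never registered in [cinder], so oslo.config
    # raises NoSuchOptError -- the exception quoted in the warnings above.
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print(exc)  # "no such option valid_interfaces in group [cinder]"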
[ 747.223041] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 747.223135] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 747.241204] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449789, 'name': Rename_Task, 'duration_secs': 0.151585} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.241413] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 747.241615] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-460b781d-95a5-46e5-82b0-27ff7f4a8d9b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.251601] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for the task: (returnval){ [ 747.251601] env[65503]: value = "task-4449791" [ 747.251601] env[65503]: _type = "Task" [ 747.251601] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.260491] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449791, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.317578] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Updated VIF entry in instance network info cache for port 7dd31600-1222-4005-ad0f-74f4f93fcd4b. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 747.317951] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Updating instance_info_cache with network_info: [{"id": "7dd31600-1222-4005-ad0f-74f4f93fcd4b", "address": "fa:16:3e:7e:79:ca", "network": {"id": "88e75d0a-b4c5-4ba5-bc83-f6baa008b75e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-195385216-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7b84f071d5d49168314c6baf24748a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89463199-7ddf-4ee7-b485-1629a75b4b8f", "external-id": "nsx-vlan-transportzone-302", "segmentation_id": 302, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dd31600-12", "ovs_interfaceid": "7dd31600-1222-4005-ad0f-74f4f93fcd4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 747.319594] env[65503]: DEBUG nova.compute.manager [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 747.433978] env[65503]: DEBUG oslo_vmware.api [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Task: {'id': task-4449790, 'name': ReconfigVM_Task, 'duration_secs': 0.191744} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.433978] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870312', 'volume_id': 'b697b167-e710-40c5-8992-c508c36ac340', 'name': 'volume-b697b167-e710-40c5-8992-c508c36ac340', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db942a2d-671b-4036-a80b-d2375145cd29', 'attached_at': '', 'detached_at': '', 'volume_id': 'b697b167-e710-40c5-8992-c508c36ac340', 'serial': 'b697b167-e710-40c5-8992-c508c36ac340'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 747.448940] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.468262] env[65503]: DEBUG nova.compute.manager [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 747.500423] env[65503]: DEBUG nova.virt.hardware [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 747.500423] env[65503]: DEBUG nova.virt.hardware [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 747.500423] env[65503]: DEBUG nova.virt.hardware [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 747.500423] env[65503]: DEBUG nova.virt.hardware [None 
req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 747.500423] env[65503]: DEBUG nova.virt.hardware [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 747.500423] env[65503]: DEBUG nova.virt.hardware [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 747.500423] env[65503]: DEBUG nova.virt.hardware [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 747.500423] env[65503]: DEBUG nova.virt.hardware [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 747.500423] env[65503]: DEBUG nova.virt.hardware [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 747.500423] env[65503]: DEBUG nova.virt.hardware [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 747.500423] env[65503]: DEBUG nova.virt.hardware [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 747.501186] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3516ec-9047-48d5-b0e2-3e031f9c25c6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.510546] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094fa249-41df-40c6-84ae-5e782e940dd8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.589389] env[65503]: DEBUG nova.scheduler.client.report [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 747.733037] env[65503]: DEBUG nova.compute.manager [req-e3684787-0b5c-4327-9d30-5f76250ef5ef req-94fbe5ab-aeec-4978-83ac-2b5756a1ca75 service nova] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Received event network-vif-deleted-5a0ecec2-8866-4131-aa0b-e63ba349190f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 747.762666] env[65503]: DEBUG oslo_vmware.api [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449791, 'name': PowerOnVM_Task, 'duration_secs': 0.510688} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.762986] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 747.763344] env[65503]: INFO nova.compute.manager [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Took 12.03 seconds to spawn the instance on the hypervisor. 
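The Task / "progress is N%" / "completed successfully" lines throughout this section (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follow oslo.vmware's standard pattern: invoke_api() starts a vCenter task and wait_for_task() polls it until vCenter reports success. A hedged sketch of that pattern, using placeholder host, credentials and vm_ref rather than anything from this deployment:

from oslo_vmware import api as vmware_api

# Placeholder connection details; a real session needs a reachable vCenter.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # VirtualMachine managed-object reference, looked up elsewhere

# Start a vCenter task and block until it finishes; while polling,
# oslo.vmware emits the "Task: {...} progress is N%" debug lines seen above.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the task completes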
[ 747.763569] env[65503]: DEBUG nova.compute.manager [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 747.764473] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db967cf-1530-4e22-9b9d-f2a2cc48c389 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.823285] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Releasing lock "refresh_cache-75578ccd-2b34-4948-9afa-ac94e9fd8b4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.823836] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Received event network-vif-plugged-f5e5c61e-0df6-47d9-ab15-21c80b68c833 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 747.824247] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquiring lock "b6cda94b-2894-4cf0-8522-6593df9723bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.824644] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Lock "b6cda94b-2894-4cf0-8522-6593df9723bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.824947] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Lock "b6cda94b-2894-4cf0-8522-6593df9723bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.825258] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] No waiting events found dispatching network-vif-plugged-f5e5c61e-0df6-47d9-ab15-21c80b68c833 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 747.825569] env[65503]: WARNING nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Received unexpected event network-vif-plugged-f5e5c61e-0df6-47d9-ab15-21c80b68c833 for instance with vm_state building and task_state spawning. 
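The Acquiring / acquired / released lock lines above (for "compute_resources", the per-instance build locks, and the "<uuid>-events" locks) are emitted by oslo.concurrency's lockutils wrapper around the decorated function. A minimal runnable sketch of the same pattern, with a hypothetical function name standing in for Nova's resource-tracker code:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim(instance_uuid):
    # Runs with the named in-process lock held; concurrent callers queue up,
    # which is what the "waited N.NNNs" / "held N.NNNs" figures measure.
    return 'claimed %s' % instance_uuid

print(claim('f8d61ded-ddf7-4ec9-88e7-92ffb6934733'))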
[ 747.825868] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Received event network-changed-f5e5c61e-0df6-47d9-ab15-21c80b68c833 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 747.826176] env[65503]: DEBUG nova.compute.manager [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Refreshing instance network info cache due to event network-changed-f5e5c61e-0df6-47d9-ab15-21c80b68c833. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 747.826501] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquiring lock "refresh_cache-b6cda94b-2894-4cf0-8522-6593df9723bd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.826747] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Acquired lock "refresh_cache-b6cda94b-2894-4cf0-8522-6593df9723bd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.827039] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Refreshing network info cache for port f5e5c61e-0df6-47d9-ab15-21c80b68c833 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 747.848349] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.060885] env[65503]: DEBUG oslo_concurrency.lockutils [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.061233] env[65503]: DEBUG oslo_concurrency.lockutils [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.061909] env[65503]: INFO nova.compute.manager [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Attaching volume abf7c28d-f540-4663-b4f2-d36b819413f6 to /dev/sdb [ 748.077890] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 
tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "1bda7a65-0231-4753-9762-43e9b13bd893" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.078209] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "1bda7a65-0231-4753-9762-43e9b13bd893" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.096631] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.096631] env[65503]: DEBUG nova.compute.manager [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 748.101000] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.386s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.101000] env[65503]: INFO nova.compute.claims [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.132249] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42be4d4-be69-401f-85d5-a9d1091ef154 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.143917] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa29637-2e88-4bef-a9a7-cb8821089339 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.163214] env[65503]: DEBUG nova.virt.block_device [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Updating existing volume attachment record: 6d158c0b-5e7b-4eb2-8849-9af93e835471 {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 748.201987] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] 
Acquiring lock "f840b178-fd54-4c84-808c-a14c99a5ecdd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.201987] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "f840b178-fd54-4c84-808c-a14c99a5ecdd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.255161] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "62a18449-7cec-4785-a340-d0450adc8044" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.256878] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "62a18449-7cec-4785-a340-d0450adc8044" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.283129] env[65503]: INFO nova.compute.manager [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Took 25.90 seconds to build instance. [ 748.324779] env[65503]: DEBUG nova.compute.manager [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Received event network-changed-2da53c45-1677-47a0-99c7-20ed6ebfc0ad {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 748.325183] env[65503]: DEBUG nova.compute.manager [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Refreshing instance network info cache due to event network-changed-2da53c45-1677-47a0-99c7-20ed6ebfc0ad. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 748.325395] env[65503]: DEBUG oslo_concurrency.lockutils [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Acquiring lock "refresh_cache-7ed036d1-8188-4aab-9d6d-8d7e46147812" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.325557] env[65503]: DEBUG oslo_concurrency.lockutils [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Acquired lock "refresh_cache-7ed036d1-8188-4aab-9d6d-8d7e46147812" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.325739] env[65503]: DEBUG nova.network.neutron [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Refreshing network info cache for port 2da53c45-1677-47a0-99c7-20ed6ebfc0ad {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 748.333105] env[65503]: WARNING neutronclient.v2_0.client [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 748.333325] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 748.333823] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 748.453239] env[65503]: DEBUG nova.network.neutron [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Successfully updated port: 49fc5f6e-b681-4971-88d0-255d42df1686 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 748.487585] env[65503]: DEBUG nova.objects.instance [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Lazy-loading 'flavor' on Instance uuid db942a2d-671b-4036-a80b-d2375145cd29 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 748.578170] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 748.578615] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling 
service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 748.585844] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 748.607683] env[65503]: DEBUG nova.compute.utils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 748.612250] env[65503]: DEBUG nova.compute.manager [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 748.612250] env[65503]: DEBUG nova.network.neutron [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 748.612250] env[65503]: WARNING neutronclient.v2_0.client [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 748.612250] env[65503]: WARNING neutronclient.v2_0.client [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 748.612629] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 748.612767] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 748.668218] env[65503]: WARNING neutronclient.v2_0.client [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 748.669009] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 748.669388] env[65503]: WARNING openstack [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 748.683347] env[65503]: DEBUG nova.policy [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '007d95ce82f34492a4cd26bdf1df313a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae68676f87fd4edc802b2e4b4917ceec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 748.703281] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 748.725610] env[65503]: DEBUG oslo_concurrency.lockutils [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquiring lock "34008711-b51b-467b-b972-bfda1023d696" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.725800] env[65503]: DEBUG oslo_concurrency.lockutils [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Lock "34008711-b51b-467b-b972-bfda1023d696" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.726048] env[65503]: DEBUG oslo_concurrency.lockutils [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquiring lock "34008711-b51b-467b-b972-bfda1023d696-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.726162] env[65503]: DEBUG oslo_concurrency.lockutils [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Lock "34008711-b51b-467b-b972-bfda1023d696-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.726395] env[65503]: DEBUG oslo_concurrency.lockutils [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Lock "34008711-b51b-467b-b972-bfda1023d696-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.730329] env[65503]: INFO nova.compute.manager [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Terminating instance [ 748.784021] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "a57486e1-82e3-48d5-99fe-c89b300a2136" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.784021] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.792946] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bb0515-3f51-4623-b440-e367feaa6f36 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Lock "7ed036d1-8188-4aab-9d6d-8d7e46147812" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.413s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.829211] env[65503]: WARNING neutronclient.v2_0.client [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 748.830280] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 748.830924] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 748.957268] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquiring lock "refresh_cache-ca5962fe-3e41-4fae-8860-90fa7278e0fc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.957676] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquired lock "refresh_cache-ca5962fe-3e41-4fae-8860-90fa7278e0fc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.957910] env[65503]: DEBUG nova.network.neutron [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 748.999226] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7d4f3bd1-48f8-425d-9cfe-253035f7d418 tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Lock "db942a2d-671b-4036-a80b-d2375145cd29" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.362s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.020427] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 
req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Updated VIF entry in instance network info cache for port f5e5c61e-0df6-47d9-ab15-21c80b68c833. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 749.020811] env[65503]: DEBUG nova.network.neutron [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Updating instance_info_cache with network_info: [{"id": "f5e5c61e-0df6-47d9-ab15-21c80b68c833", "address": "fa:16:3e:bc:42:78", "network": {"id": "f74e7fa9-c4c6-43e3-bb39-f2f843b914b5", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1457325106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c024f22a228f4d2faa4b9316ca53a1ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5e5c61e-0d", "ovs_interfaceid": "f5e5c61e-0df6-47d9-ab15-21c80b68c833", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 749.114024] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.122359] env[65503]: DEBUG nova.compute.manager [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 749.217295] env[65503]: DEBUG nova.network.neutron [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Successfully created port: 59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 749.234339] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.236541] env[65503]: DEBUG nova.compute.manager [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 749.236746] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.238546] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8b85de-6734-45f4-bf37-a5e883c07ff8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.257352] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 749.259462] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 749.260018] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 749.269136] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb66aa40-3674-49b1-ad44-0a2c7766239c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.282421] env[65503]: DEBUG oslo_vmware.api [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 
tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for the task: (returnval){ [ 749.282421] env[65503]: value = "task-4449795" [ 749.282421] env[65503]: _type = "Task" [ 749.282421] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.293647] env[65503]: DEBUG oslo_vmware.api [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449795, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.297286] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 749.360171] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquiring lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.360171] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.360171] env[65503]: INFO nova.compute.manager [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Rebooting instance [ 749.438777] env[65503]: WARNING neutronclient.v2_0.client [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 749.439525] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 749.439844] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 749.461198] env[65503]: WARNING openstack [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 749.461542] env[65503]: WARNING openstack [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 749.525421] env[65503]: DEBUG oslo_concurrency.lockutils [req-c41e6381-c7af-4629-8ea0-b17c9c9db1c1 req-4f52672c-ed38-489e-a518-00dbde760d14 service nova] Releasing lock "refresh_cache-b6cda94b-2894-4cf0-8522-6593df9723bd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.561334] env[65503]: DEBUG nova.network.neutron [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 749.616804] env[65503]: DEBUG nova.network.neutron [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Updated VIF entry in instance network info cache for port 2da53c45-1677-47a0-99c7-20ed6ebfc0ad. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 749.617176] env[65503]: DEBUG nova.network.neutron [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Updating instance_info_cache with network_info: [{"id": "2da53c45-1677-47a0-99c7-20ed6ebfc0ad", "address": "fa:16:3e:a1:e4:4b", "network": {"id": "7d2c1d69-8f8c-4413-98d4-7389dd23bffa", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-2110345902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "86a9eddff6df487bb469066f7e1a0dde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2da53c45-16", "ovs_interfaceid": "2da53c45-1677-47a0-99c7-20ed6ebfc0ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 749.635255] env[65503]: WARNING openstack [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 749.635640] env[65503]: WARNING openstack [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 749.646044] env[65503]: DEBUG nova.network.neutron [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Successfully created port: e646f6df-8d32-4092-a47b-63baf184da39 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 749.792379] env[65503]: DEBUG oslo_vmware.api [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449795, 'name': PowerOffVM_Task, 'duration_secs': 0.222706} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.792379] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 749.792379] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 749.793282] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5afcf0e-2966-4627-ac4c-dc9c778d1506 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.795558] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb70c5e-5846-4400-841e-8e9f5e6c1a9c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.808936] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0eb875-b680-4aef-800a-76d4e069bac7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.844980] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.845805] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c340521e-6852-4ab3-9c8f-9b9af5de5df9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.855457] env[65503]: WARNING neutronclient.v2_0.client [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 749.857449] env[65503]: WARNING openstack [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 749.857449] env[65503]: WARNING openstack [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 749.867818] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f93eb82-beaa-4a28-99cb-0aac38bd7a57 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.876200] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 749.876425] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 749.876599] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Deleting the datastore file [datastore2] 34008711-b51b-467b-b972-bfda1023d696 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 749.877311] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06f4afa6-e625-4851-81c0-3b86a80d6889 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.888154] env[65503]: DEBUG nova.compute.provider_tree [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.891777] env[65503]: DEBUG oslo_vmware.api [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for the task: (returnval){ [ 749.891777] env[65503]: value = "task-4449797" [ 749.891777] env[65503]: _type = "Task" [ 749.891777] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.899056] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquiring lock "refresh_cache-75578ccd-2b34-4948-9afa-ac94e9fd8b4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.899236] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquired lock "refresh_cache-75578ccd-2b34-4948-9afa-ac94e9fd8b4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.899642] env[65503]: DEBUG nova.network.neutron [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 749.904951] env[65503]: DEBUG oslo_vmware.api [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449797, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.126580] env[65503]: DEBUG oslo_concurrency.lockutils [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Releasing lock "refresh_cache-7ed036d1-8188-4aab-9d6d-8d7e46147812" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.126912] env[65503]: DEBUG nova.compute.manager [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Received event network-changed-1e1fc8a1-f7e8-49f4-b328-b7f029f59874 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 750.127088] env[65503]: DEBUG nova.compute.manager [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Refreshing instance network info cache due to event network-changed-1e1fc8a1-f7e8-49f4-b328-b7f029f59874. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 750.127277] env[65503]: DEBUG oslo_concurrency.lockutils [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Acquiring lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.127409] env[65503]: DEBUG oslo_concurrency.lockutils [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Acquired lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.127561] env[65503]: DEBUG nova.network.neutron [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Refreshing network info cache for port 1e1fc8a1-f7e8-49f4-b328-b7f029f59874 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 750.134753] env[65503]: DEBUG nova.compute.manager [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 750.162422] env[65503]: DEBUG nova.virt.hardware [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 750.163047] env[65503]: DEBUG nova.virt.hardware [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 750.163047] env[65503]: DEBUG nova.virt.hardware [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 750.163047] env[65503]: DEBUG nova.virt.hardware [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 750.163196] env[65503]: DEBUG nova.virt.hardware [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b 
tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 750.163287] env[65503]: DEBUG nova.virt.hardware [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 750.163753] env[65503]: DEBUG nova.virt.hardware [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 750.163753] env[65503]: DEBUG nova.virt.hardware [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 750.163852] env[65503]: DEBUG nova.virt.hardware [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 750.163922] env[65503]: DEBUG nova.virt.hardware [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 750.164096] env[65503]: DEBUG nova.virt.hardware [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 750.165106] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc63bed2-90da-4ab1-8dd2-9c1829954fdc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.174298] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6a33a1-85ec-47e0-ac86-fa317dd01095 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.393828] env[65503]: DEBUG nova.scheduler.client.report [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 750.413254] env[65503]: WARNING neutronclient.v2_0.client [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 750.414157] env[65503]: WARNING openstack [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 750.414651] env[65503]: WARNING openstack [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 750.422429] env[65503]: DEBUG oslo_vmware.api [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Task: {'id': task-4449797, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.469357} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.424505] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.424505] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 750.424505] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.424505] env[65503]: INFO nova.compute.manager [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] [instance: 34008711-b51b-467b-b972-bfda1023d696] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 750.424505] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 750.424794] env[65503]: DEBUG nova.compute.manager [-] [instance: 34008711-b51b-467b-b972-bfda1023d696] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 750.424794] env[65503]: DEBUG nova.network.neutron [-] [instance: 34008711-b51b-467b-b972-bfda1023d696] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 750.424931] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 750.425416] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 750.425663] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 750.536821] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 750.603012] env[65503]: DEBUG nova.network.neutron [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Updating instance_info_cache with network_info: [{"id": "49fc5f6e-b681-4971-88d0-255d42df1686", "address": "fa:16:3e:d8:4a:5d", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.25", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49fc5f6e-b6", "ovs_interfaceid": "49fc5f6e-b681-4971-88d0-255d42df1686", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 750.631221] env[65503]: WARNING neutronclient.v2_0.client [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 750.632121] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 750.632467] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 750.900245] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.801s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.900433] env[65503]: DEBUG nova.compute.manager [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 750.905275] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.465s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.907144] env[65503]: INFO nova.compute.claims [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 750.917988] env[65503]: WARNING openstack [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 750.918393] env[65503]: WARNING openstack [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 751.058588] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 751.060871] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 751.095050] env[65503]: WARNING neutronclient.v2_0.client [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 751.095764] env[65503]: WARNING openstack [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 751.096141] env[65503]: WARNING openstack [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 751.105722] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Releasing lock "refresh_cache-ca5962fe-3e41-4fae-8860-90fa7278e0fc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.106131] env[65503]: DEBUG nova.compute.manager [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Instance network_info: |[{"id": "49fc5f6e-b681-4971-88d0-255d42df1686", "address": "fa:16:3e:d8:4a:5d", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.25", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49fc5f6e-b6", "ovs_interfaceid": "49fc5f6e-b681-4971-88d0-255d42df1686", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 751.106615] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:4a:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '49fc5f6e-b681-4971-88d0-255d42df1686', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.114685] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Creating folder: Project (07d60cd1e83c4ed5b6f5316646dda385). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.115021] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d941ca47-6a02-4324-9a2e-ab168f7fa37f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.135020] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Created folder: Project (07d60cd1e83c4ed5b6f5316646dda385) in parent group-v870190. [ 751.135020] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Creating folder: Instances. Parent ref: group-v870318. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.135020] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70dcc05b-6f7d-4543-bb0a-ec0ce2066731 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.145306] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Created folder: Instances in parent group-v870318. [ 751.145865] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 751.146217] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 751.146559] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d147a017-09ba-4bb8-8089-8408303ecfc5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.174226] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.174226] env[65503]: value = "task-4449801" [ 751.174226] env[65503]: _type = "Task" [ 751.174226] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.181969] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449801, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.268517] env[65503]: DEBUG nova.network.neutron [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Successfully updated port: 59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 751.303147] env[65503]: WARNING neutronclient.v2_0.client [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 751.303358] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 751.304314] env[65503]: WARNING openstack [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 751.334884] env[65503]: DEBUG nova.network.neutron [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Updating instance_info_cache with network_info: [{"id": "7dd31600-1222-4005-ad0f-74f4f93fcd4b", "address": "fa:16:3e:7e:79:ca", "network": {"id": "88e75d0a-b4c5-4ba5-bc83-f6baa008b75e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-195385216-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7b84f071d5d49168314c6baf24748a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89463199-7ddf-4ee7-b485-1629a75b4b8f", "external-id": "nsx-vlan-transportzone-302", "segmentation_id": 302, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dd31600-12", "ovs_interfaceid": "7dd31600-1222-4005-ad0f-74f4f93fcd4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 751.363092] env[65503]: DEBUG nova.network.neutron [-] [instance: 34008711-b51b-467b-b972-bfda1023d696] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 751.407133] env[65503]: DEBUG nova.network.neutron [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 
req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Updated VIF entry in instance network info cache for port 1e1fc8a1-f7e8-49f4-b328-b7f029f59874. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 751.407485] env[65503]: DEBUG nova.network.neutron [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Updating instance_info_cache with network_info: [{"id": "1e1fc8a1-f7e8-49f4-b328-b7f029f59874", "address": "fa:16:3e:cf:06:d8", "network": {"id": "234e0d20-0522-4720-b75f-e1246236d495", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1433648354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8d373b14fc34ee69c50f9f7ce58c888", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e1fc8a1-f7", "ovs_interfaceid": "1e1fc8a1-f7e8-49f4-b328-b7f029f59874", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 751.415107] env[65503]: DEBUG nova.compute.utils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 751.417113] env[65503]: DEBUG nova.compute.manager [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 751.417316] env[65503]: DEBUG nova.network.neutron [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 751.417764] env[65503]: WARNING neutronclient.v2_0.client [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 751.417959] env[65503]: WARNING neutronclient.v2_0.client [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 751.418598] env[65503]: WARNING openstack [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 751.418935] env[65503]: WARNING openstack [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 751.467771] env[65503]: DEBUG nova.policy [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9337f2cb77a24772aba3ef0eb341f2d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd6e7f469b7d408fae0621171c096f0a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 751.541907] env[65503]: DEBUG nova.compute.manager [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Received event network-vif-plugged-49fc5f6e-b681-4971-88d0-255d42df1686 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 751.542168] env[65503]: DEBUG oslo_concurrency.lockutils [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Acquiring lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.542380] env[65503]: DEBUG oslo_concurrency.lockutils [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.542598] env[65503]: DEBUG oslo_concurrency.lockutils [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.542987] env[65503]: DEBUG nova.compute.manager [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] No waiting events found dispatching network-vif-plugged-49fc5f6e-b681-4971-88d0-255d42df1686 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 751.543168] env[65503]: WARNING nova.compute.manager [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Received unexpected event network-vif-plugged-49fc5f6e-b681-4971-88d0-255d42df1686 for instance with vm_state building and task_state spawning. [ 751.543339] env[65503]: DEBUG nova.compute.manager [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Received event network-changed-49fc5f6e-b681-4971-88d0-255d42df1686 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 751.543487] env[65503]: DEBUG nova.compute.manager [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Refreshing instance network info cache due to event network-changed-49fc5f6e-b681-4971-88d0-255d42df1686. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 751.543666] env[65503]: DEBUG oslo_concurrency.lockutils [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Acquiring lock "refresh_cache-ca5962fe-3e41-4fae-8860-90fa7278e0fc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.543857] env[65503]: DEBUG oslo_concurrency.lockutils [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Acquired lock "refresh_cache-ca5962fe-3e41-4fae-8860-90fa7278e0fc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.543994] env[65503]: DEBUG nova.network.neutron [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Refreshing network info cache for port 49fc5f6e-b681-4971-88d0-255d42df1686 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 751.689191] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449801, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.837873] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Releasing lock "refresh_cache-75578ccd-2b34-4948-9afa-ac94e9fd8b4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.859552] env[65503]: DEBUG nova.network.neutron [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Successfully created port: 2a598d20-dc23-42a4-9d99-962df4c4d391 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 751.866835] env[65503]: INFO nova.compute.manager [-] [instance: 34008711-b51b-467b-b972-bfda1023d696] Took 1.44 seconds to deallocate network for instance. [ 751.896037] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquiring lock "7ed036d1-8188-4aab-9d6d-8d7e46147812" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.896447] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Lock "7ed036d1-8188-4aab-9d6d-8d7e46147812" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.896661] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquiring lock "7ed036d1-8188-4aab-9d6d-8d7e46147812-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.896840] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Lock "7ed036d1-8188-4aab-9d6d-8d7e46147812-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.897011] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Lock "7ed036d1-8188-4aab-9d6d-8d7e46147812-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.900098] env[65503]: INFO nova.compute.manager [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 
tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Terminating instance [ 751.911017] env[65503]: DEBUG oslo_concurrency.lockutils [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] Releasing lock "refresh_cache-8f0bf665-b21b-42ed-816d-69dee2f40654" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.911589] env[65503]: DEBUG nova.compute.manager [req-09a99f40-1138-4e3c-8bfb-0c5d21c05cb3 req-e8c35c97-8e2e-49c6-8dce-d704fecece46 service nova] [instance: eec6a484-ab00-402e-a369-c3009065c553] Received event network-vif-deleted-984474f0-e03b-413b-8c1f-8e553672a7df {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 751.927878] env[65503]: DEBUG nova.compute.manager [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 751.954655] env[65503]: DEBUG nova.compute.manager [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Received event network-changed-f5e5c61e-0df6-47d9-ab15-21c80b68c833 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 751.955193] env[65503]: DEBUG nova.compute.manager [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Refreshing instance network info cache due to event network-changed-f5e5c61e-0df6-47d9-ab15-21c80b68c833. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 751.955576] env[65503]: DEBUG oslo_concurrency.lockutils [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] Acquiring lock "refresh_cache-b6cda94b-2894-4cf0-8522-6593df9723bd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.955778] env[65503]: DEBUG oslo_concurrency.lockutils [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] Acquired lock "refresh_cache-b6cda94b-2894-4cf0-8522-6593df9723bd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.955939] env[65503]: DEBUG nova.network.neutron [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Refreshing network info cache for port f5e5c61e-0df6-47d9-ab15-21c80b68c833 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 752.046908] env[65503]: WARNING neutronclient.v2_0.client [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 752.047913] env[65503]: WARNING openstack [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 752.048343] env[65503]: WARNING openstack [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 752.190559] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449801, 'name': CreateVM_Task, 'duration_secs': 0.582027} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.190870] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 752.191242] env[65503]: WARNING neutronclient.v2_0.client [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 752.191645] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.191807] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.192128] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 752.192392] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-806d20cd-b9a3-4fdd-afc1-b81bc1ae4b7c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.198194] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for the task: (returnval){ [ 752.198194] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529d89ed-5924-c4fd-4de5-07334995c18b" [ 752.198194] env[65503]: _type = "Task" [ 
752.198194] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.209491] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529d89ed-5924-c4fd-4de5-07334995c18b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.342536] env[65503]: DEBUG nova.compute.manager [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 752.343519] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d4969e-c4a6-44d4-bc91-4f0a9bc28fc3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.375151] env[65503]: DEBUG oslo_concurrency.lockutils [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.405863] env[65503]: DEBUG nova.compute.manager [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 752.406102] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 752.407056] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0be14cf-2533-4a55-ab98-6aa93231374f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.415430] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 752.415690] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-652179fe-4c5f-441d-b9a9-8a9958756995 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.422660] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac880f0d-89c2-4442-8cfe-308846df0f5f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.427237] env[65503]: DEBUG oslo_vmware.api [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for the task: (returnval){ [ 752.427237] env[65503]: value = "task-4449802" [ 752.427237] env[65503]: _type = "Task" [ 752.427237] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.437131] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b000a19f-1466-4649-a92e-a8d0623dce55 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.443696] env[65503]: DEBUG oslo_vmware.api [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449802, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.472781] env[65503]: WARNING neutronclient.v2_0.client [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 752.473483] env[65503]: WARNING openstack [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 752.473835] env[65503]: WARNING openstack [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 752.482863] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77eabac1-a85d-49de-a0d3-0726196fda18 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.492667] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f6dd15-a9c9-4f6a-97aa-31b251d617a0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.508182] env[65503]: DEBUG nova.compute.provider_tree [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.711411] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529d89ed-5924-c4fd-4de5-07334995c18b, 'name': SearchDatastore_Task, 'duration_secs': 0.010783} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.711766] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.711994] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 752.712238] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.712376] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.712572] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.712918] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c1976fa-ec81-4e74-874d-54dd48ea47d2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.722608] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.722793] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 752.723528] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e3dd393-de66-4509-ad83-c37885d1dbd9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.729428] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for the task: (returnval){ [ 752.729428] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521bdf6c-13ca-12de-27cf-71461e786e0b" [ 752.729428] env[65503]: _type = "Task" [ 752.729428] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.737177] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521bdf6c-13ca-12de-27cf-71461e786e0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.938025] env[65503]: DEBUG oslo_vmware.api [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449802, 'name': PowerOffVM_Task, 'duration_secs': 0.202391} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.938359] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 752.938593] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 752.938991] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-676f2a24-9df5-4153-a3e8-fd0ceb22c4d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.941790] env[65503]: DEBUG nova.compute.manager [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 752.969751] env[65503]: DEBUG nova.virt.hardware [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 752.969984] env[65503]: DEBUG nova.virt.hardware [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 752.970148] env[65503]: DEBUG nova.virt.hardware [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 752.970325] env[65503]: DEBUG nova.virt.hardware [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 752.970463] env[65503]: DEBUG nova.virt.hardware [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 752.970602] env[65503]: DEBUG nova.virt.hardware [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 752.970805] env[65503]: DEBUG nova.virt.hardware [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 752.970953] env[65503]: DEBUG nova.virt.hardware [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 752.971125] env[65503]: DEBUG nova.virt.hardware [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 752.971281] env[65503]: DEBUG nova.virt.hardware [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 752.971448] env[65503]: DEBUG nova.virt.hardware [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 752.972382] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f4db31-69d2-4ba5-82a1-2701fa522382 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.981758] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34219a2d-b6e4-4f6b-ae36-613c1befaf5c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.011033] env[65503]: DEBUG nova.scheduler.client.report [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 753.015895] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 753.016123] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 753.016302] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Deleting the datastore file [datastore1] 7ed036d1-8188-4aab-9d6d-8d7e46147812 {{(pid=65503) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 753.016813] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c4aaf98-2518-4c8f-bd13-bffff9a00505 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.024066] env[65503]: DEBUG oslo_vmware.api [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for the task: (returnval){ [ 753.024066] env[65503]: value = "task-4449804" [ 753.024066] env[65503]: _type = "Task" [ 753.024066] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.035488] env[65503]: DEBUG oslo_vmware.api [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449804, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.241886] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521bdf6c-13ca-12de-27cf-71461e786e0b, 'name': SearchDatastore_Task, 'duration_secs': 0.009538} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.242588] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7eb301e-ae5a-4060-8427-3994d4e37cd0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.249397] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for the task: (returnval){ [ 753.249397] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529ef34b-0f25-955b-08a1-ace311f3f812" [ 753.249397] env[65503]: _type = "Task" [ 753.249397] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.255145] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 753.255371] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870317', 'volume_id': 'abf7c28d-f540-4663-b4f2-d36b819413f6', 'name': 'volume-abf7c28d-f540-4663-b4f2-d36b819413f6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9acbc312-a3a2-4758-87cd-5576c4f1f8dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'abf7c28d-f540-4663-b4f2-d36b819413f6', 'serial': 'abf7c28d-f540-4663-b4f2-d36b819413f6'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 753.256181] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344429db-ea69-4f6b-9429-d3230d91ef63 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.261572] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529ef34b-0f25-955b-08a1-ace311f3f812, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.275884] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a29876-fb96-4c7d-b294-37eba606635e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.302738] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] volume-abf7c28d-f540-4663-b4f2-d36b819413f6/volume-abf7c28d-f540-4663-b4f2-d36b819413f6.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 753.303032] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d3e8849-596a-4df6-ae9e-10a64cf428b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.322771] env[65503]: DEBUG oslo_vmware.api [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 753.322771] env[65503]: value = "task-4449805" [ 753.322771] env[65503]: _type = "Task" [ 753.322771] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.334381] env[65503]: DEBUG oslo_vmware.api [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449805, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.361589] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8c2447-11a8-40d1-86ba-8295ef540146 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.369771] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Doing hard reboot of VM {{(pid=65503) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 753.370130] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-aec7a157-14b0-49a3-abe7-73b7481915aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.377694] env[65503]: DEBUG oslo_vmware.api [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for the task: (returnval){ [ 753.377694] env[65503]: value = "task-4449806" [ 753.377694] env[65503]: _type = "Task" [ 753.377694] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.387446] env[65503]: DEBUG oslo_vmware.api [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449806, 'name': ResetVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.488214] env[65503]: DEBUG nova.network.neutron [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Successfully updated port: e646f6df-8d32-4092-a47b-63baf184da39 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 753.499773] env[65503]: DEBUG nova.network.neutron [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Successfully updated port: 2a598d20-dc23-42a4-9d99-962df4c4d391 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 753.517987] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.519522] env[65503]: DEBUG nova.compute.manager [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 753.522383] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.458s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.525948] env[65503]: INFO nova.compute.claims [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.544334] env[65503]: DEBUG oslo_vmware.api [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Task: {'id': task-4449804, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392536} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.544484] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 753.544686] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 753.545265] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 753.545265] env[65503]: INFO nova.compute.manager [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Took 1.14 seconds to destroy the instance on the hypervisor. [ 753.545265] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
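
The "compute_resources" lock messages above (acquired after waiting 8.458s, released after being held 2.613s) come from oslo.concurrency serializing resource-tracker claims on a single named lock. A minimal sketch of that usage pattern follows; `COMPUTE_RESOURCE_SEMAPHORE` and `_do_claim` are illustrative names, not the actual Nova code.

```python
from oslo_concurrency import lockutils

# Sketch of the pattern behind the acquired/released messages above:
# all claims are funnelled through one named lock, and the log reports
# how long each caller waited for it and how long it was held.
COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"

@lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
def instance_claim(instance_uuid, vcpus, memory_mb):
    # Only one thread at a time reaches this critical section.
    return _do_claim(instance_uuid, vcpus, memory_mb)

def _do_claim(instance_uuid, vcpus, memory_mb):
    # Placeholder for the real claim bookkeeping.
    return {"instance": instance_uuid, "vcpus": vcpus, "memory_mb": memory_mb}
```
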
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 753.545529] env[65503]: DEBUG nova.compute.manager [-] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 753.545628] env[65503]: DEBUG nova.network.neutron [-] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 753.545910] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 753.546483] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 753.548450] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 753.645324] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 753.737679] env[65503]: WARNING openstack [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 753.738319] env[65503]: WARNING openstack [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 753.761857] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529ef34b-0f25-955b-08a1-ace311f3f812, 'name': SearchDatastore_Task, 'duration_secs': 0.016704} completed successfully. 
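
The repeated "Disabling service 'block-storage' / 'key-manager'" warnings above are openstacksdk reacting to oslo.config raising NoSuchOptError when it looks up valid_interfaces in the [cinder] and [barbican] groups, where that option has not been registered in this process. The snippet below is a small, self-contained reproduction of that oslo.config behaviour, not Nova code; the group name is taken from the warning text.

```python
from oslo_config import cfg

# Reproduce the error text seen in the warnings above: asking a registered
# group for an option that was never registered in it raises NoSuchOptError.
conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))

try:
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    # Should print something like: no such option valid_interfaces in group [cinder]
    print(exc)
```
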
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.762148] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.762406] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] ca5962fe-3e41-4fae-8860-90fa7278e0fc/ca5962fe-3e41-4fae-8860-90fa7278e0fc.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.762746] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8afb6f6-c978-4549-930c-db919cfe591d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.771982] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for the task: (returnval){ [ 753.771982] env[65503]: value = "task-4449807" [ 753.771982] env[65503]: _type = "Task" [ 753.771982] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.643100] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "refresh_cache-f8d61ded-ddf7-4ec9-88e7-92ffb6934733" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.643322] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquired lock "refresh_cache-f8d61ded-ddf7-4ec9-88e7-92ffb6934733" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.643432] env[65503]: DEBUG nova.network.neutron [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 754.644369] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "refresh_cache-585a3d16-ee0c-4b71-9c0d-17b4bc968d09" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.644539] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired lock 
"refresh_cache-585a3d16-ee0c-4b71-9c0d-17b4bc968d09" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.647449] env[65503]: DEBUG nova.network.neutron [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 754.649602] env[65503]: DEBUG nova.compute.utils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 754.656414] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449807, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.656414] env[65503]: WARNING oslo_vmware.common.loopingcall [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] task run outlasted interval by 0.38366500000000003 sec [ 754.663837] env[65503]: WARNING openstack [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 754.664214] env[65503]: WARNING openstack [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 754.674400] env[65503]: DEBUG nova.compute.manager [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 754.690251] env[65503]: DEBUG nova.compute.manager [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 754.690521] env[65503]: DEBUG nova.network.neutron [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 754.690845] env[65503]: WARNING neutronclient.v2_0.client [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 754.691156] env[65503]: WARNING neutronclient.v2_0.client [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 754.691784] env[65503]: WARNING openstack [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 754.692172] env[65503]: WARNING openstack [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 754.714633] env[65503]: DEBUG oslo_vmware.api [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449805, 'name': ReconfigVM_Task, 'duration_secs': 0.442217} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.721824] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Reconfigured VM instance instance-00000025 to attach disk [datastore2] volume-abf7c28d-f540-4663-b4f2-d36b819413f6/volume-abf7c28d-f540-4663-b4f2-d36b819413f6.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 754.727363] env[65503]: DEBUG oslo_vmware.api [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449806, 'name': ResetVM_Task, 'duration_secs': 0.257979} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.727655] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449807, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671071} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.728212] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01f0be0b-e49b-4926-a3ed-eaf2d262aea4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.738657] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Did hard reboot of VM {{(pid=65503) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 754.738961] env[65503]: DEBUG nova.compute.manager [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 754.739392] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] ca5962fe-3e41-4fae-8860-90fa7278e0fc/ca5962fe-3e41-4fae-8860-90fa7278e0fc.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.739776] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.751812] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271c6be6-48b7-4c57-8f32-482149982a80 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.754296] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25b4720b-d8fe-4a91-b484-57e829a97295 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.757418] env[65503]: DEBUG nova.compute.manager [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Received event network-vif-plugged-59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 754.757619] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Acquiring lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.757816] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.757972] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.758142] env[65503]: DEBUG nova.compute.manager [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] No waiting events found dispatching network-vif-plugged-59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 754.758301] env[65503]: WARNING nova.compute.manager [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Received unexpected event network-vif-plugged-59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc for instance with vm_state building and task_state spawning. [ 754.758436] env[65503]: DEBUG nova.compute.manager [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Received event network-changed-59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 754.758581] env[65503]: DEBUG nova.compute.manager [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Refreshing instance network info cache due to event network-changed-59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 754.758738] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Acquiring lock "refresh_cache-f8d61ded-ddf7-4ec9-88e7-92ffb6934733" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.763116] env[65503]: DEBUG nova.policy [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffb37a9a41514e0282bdb29ab1486d6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bca4e8b44ecb4c24803c35c24892bdc3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 754.768331] env[65503]: WARNING neutronclient.v2_0.client [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 754.768953] env[65503]: WARNING openstack [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 754.769673] env[65503]: WARNING openstack [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 754.784769] env[65503]: DEBUG nova.compute.manager [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] [instance: 34008711-b51b-467b-b972-bfda1023d696] Received event network-vif-deleted-639772e3-599d-4f7e-81ad-21f2c2f49bbe {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 754.784952] env[65503]: DEBUG nova.compute.manager [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Received event network-vif-plugged-e646f6df-8d32-4092-a47b-63baf184da39 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 754.784952] env[65503]: DEBUG oslo_concurrency.lockutils [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] Acquiring lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.784952] env[65503]: DEBUG oslo_concurrency.lockutils [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] 
Lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.784952] env[65503]: DEBUG oslo_concurrency.lockutils [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] Lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.785799] env[65503]: DEBUG nova.compute.manager [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] No waiting events found dispatching network-vif-plugged-e646f6df-8d32-4092-a47b-63baf184da39 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 754.785799] env[65503]: WARNING nova.compute.manager [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Received unexpected event network-vif-plugged-e646f6df-8d32-4092-a47b-63baf184da39 for instance with vm_state building and task_state spawning. [ 754.785907] env[65503]: DEBUG nova.compute.manager [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Received event network-changed-e646f6df-8d32-4092-a47b-63baf184da39 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 754.786308] env[65503]: DEBUG nova.compute.manager [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Refreshing instance network info cache due to event network-changed-e646f6df-8d32-4092-a47b-63baf184da39. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 754.786308] env[65503]: DEBUG oslo_concurrency.lockutils [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] Acquiring lock "refresh_cache-f8d61ded-ddf7-4ec9-88e7-92ffb6934733" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.786864] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for the task: (returnval){ [ 754.786864] env[65503]: value = "task-4449809" [ 754.786864] env[65503]: _type = "Task" [ 754.786864] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.789358] env[65503]: DEBUG oslo_vmware.api [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 754.789358] env[65503]: value = "task-4449808" [ 754.789358] env[65503]: _type = "Task" [ 754.789358] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.803725] env[65503]: WARNING neutronclient.v2_0.client [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 754.806091] env[65503]: WARNING openstack [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 754.806091] env[65503]: WARNING openstack [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 754.823199] env[65503]: DEBUG oslo_vmware.api [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449808, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.823531] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449809, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.968752] env[65503]: DEBUG nova.network.neutron [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Updated VIF entry in instance network info cache for port 49fc5f6e-b681-4971-88d0-255d42df1686. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 754.969118] env[65503]: DEBUG nova.network.neutron [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Updating instance_info_cache with network_info: [{"id": "49fc5f6e-b681-4971-88d0-255d42df1686", "address": "fa:16:3e:d8:4a:5d", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.25", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49fc5f6e-b6", "ovs_interfaceid": "49fc5f6e-b681-4971-88d0-255d42df1686", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 755.007009] env[65503]: DEBUG nova.network.neutron [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Updated VIF entry in instance network info cache for port f5e5c61e-0df6-47d9-ab15-21c80b68c833. 
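
The instance_info_cache payload above is a list of VIF dictionaries. For reference, the fields that matter operationally (port id, MAC, fixed IPs, MTU, segmentation id) can be pulled out as in the sketch below, which uses a trimmed-down copy of the values logged for instance ca5962fe-3e41-4fae-8860-90fa7278e0fc; the structure is reduced to just those keys for illustration.

```python
# Trimmed-down copy of the VIF entry logged above, kept only to show where
# the useful fields live inside a network_info list.
network_info = [{
    "id": "49fc5f6e-b681-4971-88d0-255d42df1686",
    "address": "fa:16:3e:d8:4a:5d",
    "network": {
        "label": "shared",
        "subnets": [{"cidr": "192.168.233.0/24",
                     "ips": [{"address": "192.168.233.25", "type": "fixed"}]}],
        "meta": {"mtu": 8950},
    },
    "details": {"segmentation_id": 398},
    "devname": "tap49fc5f6e-b6",
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips,
          vif["network"]["meta"]["mtu"], vif["details"]["segmentation_id"])
```
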
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 755.007688] env[65503]: DEBUG nova.network.neutron [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Updating instance_info_cache with network_info: [{"id": "f5e5c61e-0df6-47d9-ab15-21c80b68c833", "address": "fa:16:3e:bc:42:78", "network": {"id": "f74e7fa9-c4c6-43e3-bb39-f2f843b914b5", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1457325106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c024f22a228f4d2faa4b9316ca53a1ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5e5c61e-0d", "ovs_interfaceid": "f5e5c61e-0df6-47d9-ab15-21c80b68c833", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 755.149548] env[65503]: WARNING openstack [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 755.149548] env[65503]: WARNING openstack [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 755.207560] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 755.207944] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option 
valid_interfaces in group [barbican] [ 755.219154] env[65503]: DEBUG nova.compute.manager [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Stashing vm_state: active {{(pid=65503) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 755.237952] env[65503]: DEBUG nova.network.neutron [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Successfully created port: 4fd37874-1199-4205-b01b-28c18959441c {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 755.258908] env[65503]: DEBUG nova.network.neutron [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 755.275701] env[65503]: DEBUG nova.network.neutron [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 755.282512] env[65503]: DEBUG nova.network.neutron [-] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 755.298951] env[65503]: WARNING openstack [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 755.298951] env[65503]: WARNING openstack [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 755.317490] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e91e89e1-cbb4-4f1f-ace2-e2658fb126ca tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.959s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.319570] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option 
valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 755.319914] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 755.334162] env[65503]: DEBUG oslo_vmware.api [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449808, 'name': ReconfigVM_Task, 'duration_secs': 0.180125} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.339585] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870317', 'volume_id': 'abf7c28d-f540-4663-b4f2-d36b819413f6', 'name': 'volume-abf7c28d-f540-4663-b4f2-d36b819413f6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9acbc312-a3a2-4758-87cd-5576c4f1f8dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'abf7c28d-f540-4663-b4f2-d36b819413f6', 'serial': 'abf7c28d-f540-4663-b4f2-d36b819413f6'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 755.341305] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08614} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.342550] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.343434] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2acedb-5271-4d8a-b7f4-36fdabc4eed2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.372095] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] ca5962fe-3e41-4fae-8860-90fa7278e0fc/ca5962fe-3e41-4fae-8860-90fa7278e0fc.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.375294] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a911672d-15d4-40ca-ac49-58ddc65dbb06 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.403171] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for the task: (returnval){ [ 755.403171] env[65503]: value = "task-4449810" [ 755.403171] env[65503]: _type = "Task" [ 755.403171] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.416990] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449810, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.460057] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Acquiring lock "db942a2d-671b-4036-a80b-d2375145cd29" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.460321] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Lock "db942a2d-671b-4036-a80b-d2375145cd29" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.472167] env[65503]: DEBUG oslo_concurrency.lockutils [req-260d2cb0-7a2d-4d70-8538-20d94c076391 req-dab42d9e-9880-452d-81ed-0637dea76ff1 service nova] Releasing lock "refresh_cache-ca5962fe-3e41-4fae-8860-90fa7278e0fc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.484156] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef2dfa4-019f-492a-80b7-556a332129be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.493113] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e2dec9-07d7-4bd0-a392-124fcdc64d22 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.528049] env[65503]: DEBUG oslo_concurrency.lockutils [req-0db20ea5-2c92-4a99-bd91-c89654749aa3 req-9ad5f1a8-3e31-4b2e-949b-f55334b660c7 service nova] Releasing lock "refresh_cache-b6cda94b-2894-4cf0-8522-6593df9723bd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.529369] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35cc2607-ba3d-417a-9f91-50082c72cc00 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.538159] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c60efd9-0ca9-44f7-bbe8-7ac1c62cfff1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.559624] env[65503]: DEBUG nova.compute.provider_tree [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.690742] env[65503]: DEBUG nova.compute.manager [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 755.720261] env[65503]: DEBUG nova.virt.hardware [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 755.720612] env[65503]: DEBUG nova.virt.hardware [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 755.720819] env[65503]: DEBUG nova.virt.hardware [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 755.721085] env[65503]: DEBUG nova.virt.hardware [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 755.721278] env[65503]: DEBUG nova.virt.hardware [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 755.721472] env[65503]: DEBUG nova.virt.hardware [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 755.721769] env[65503]: DEBUG nova.virt.hardware [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 755.721974] env[65503]: DEBUG nova.virt.hardware [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 755.722208] env[65503]: DEBUG nova.virt.hardware [None 
req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 755.722420] env[65503]: DEBUG nova.virt.hardware [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 755.722677] env[65503]: DEBUG nova.virt.hardware [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 755.723846] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7d64cd-afbd-4cf0-bc95-39184ceebcc5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.737375] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb16da48-1497-4eed-80c2-161a2e25631b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.744900] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.774314] env[65503]: WARNING neutronclient.v2_0.client [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
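
The nova.virt.hardware lines above walk through CPU topology selection for the m1.nano flavor: with 1 vCPU and no flavor or image limits, the defaults of 65536 sockets/cores/threads apply and the only valid combination is 1 socket x 1 core x 1 thread. The function below is a simplified sketch of that enumeration, not the real _get_possible_cpu_topologies; it only reproduces the arithmetic.

```python
# Simplified version of the enumeration behind "Build topologies for 1 vcpu(s)
# 1:1:1" and "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)] -- matches the log for the 1-vCPU flavor
```
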
[ 755.775162] env[65503]: WARNING openstack [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 755.775508] env[65503]: WARNING openstack [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 755.785601] env[65503]: WARNING neutronclient.v2_0.client [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 755.786204] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 755.786509] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 755.794240] env[65503]: INFO nova.compute.manager [-] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Took 2.25 seconds to deallocate network for instance. [ 755.914694] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449810, 'name': ReconfigVM_Task, 'duration_secs': 0.296995} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.914971] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Reconfigured VM instance instance-0000002a to attach disk [datastore2] ca5962fe-3e41-4fae-8860-90fa7278e0fc/ca5962fe-3e41-4fae-8860-90fa7278e0fc.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.916021] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4175cdd-5250-4255-b4f6-ae044e39c48e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.924672] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for the task: (returnval){ [ 755.924672] env[65503]: value = "task-4449811" [ 755.924672] env[65503]: _type = "Task" [ 755.924672] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.934435] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449811, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.964141] env[65503]: INFO nova.compute.manager [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Detaching volume b697b167-e710-40c5-8992-c508c36ac340 [ 756.009594] env[65503]: INFO nova.virt.block_device [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Attempting to driver detach volume b697b167-e710-40c5-8992-c508c36ac340 from mountpoint /dev/sdb [ 756.009703] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Volume detach. 
Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 756.009810] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870312', 'volume_id': 'b697b167-e710-40c5-8992-c508c36ac340', 'name': 'volume-b697b167-e710-40c5-8992-c508c36ac340', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db942a2d-671b-4036-a80b-d2375145cd29', 'attached_at': '', 'detached_at': '', 'volume_id': 'b697b167-e710-40c5-8992-c508c36ac340', 'serial': 'b697b167-e710-40c5-8992-c508c36ac340'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 756.010965] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc51bf4-0273-49b6-a69c-63ac52728964 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.035404] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571894e2-d33a-4b9f-947d-196d80f5a8a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.044302] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4aa9b44-c916-43cf-9cd3-cfeffca7ae43 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.068179] env[65503]: DEBUG nova.scheduler.client.report [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 756.077027] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48906b26-0b78-4e59-b61c-e96d31e40075 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.097574] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] The volume has not been displaced from its original location: [datastore2] volume-b697b167-e710-40c5-8992-c508c36ac340/volume-b697b167-e710-40c5-8992-c508c36ac340.vmdk. No consolidation needed. 
{{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 756.103222] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Reconfiguring VM instance instance-0000000a to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 756.104803] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3927cfa1-87e4-480c-80fe-01915aeee389 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.130366] env[65503]: DEBUG oslo_vmware.api [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Waiting for the task: (returnval){ [ 756.130366] env[65503]: value = "task-4449812" [ 756.130366] env[65503]: _type = "Task" [ 756.130366] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.139736] env[65503]: DEBUG oslo_vmware.api [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Task: {'id': task-4449812, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.199302] env[65503]: DEBUG nova.network.neutron [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Updating instance_info_cache with network_info: [{"id": "2a598d20-dc23-42a4-9d99-962df4c4d391", "address": "fa:16:3e:f7:60:54", "network": {"id": "d4cdc216-2fcb-4281-8227-0887797358f6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-21705326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd6e7f469b7d408fae0621171c096f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a598d20-dc", "ovs_interfaceid": "2a598d20-dc23-42a4-9d99-962df4c4d391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 756.302817] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in 
group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 756.303361] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 756.311764] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.393554] env[65503]: DEBUG nova.objects.instance [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lazy-loading 'flavor' on Instance uuid 9acbc312-a3a2-4758-87cd-5576c4f1f8dc {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 756.435688] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449811, 'name': Rename_Task, 'duration_secs': 0.182302} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.435688] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.435938] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81fe37c5-da75-4ebc-8810-d1cba9de0006 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.443063] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for the task: (returnval){ [ 756.443063] env[65503]: value = "task-4449813" [ 756.443063] env[65503]: _type = "Task" [ 756.443063] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.453528] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449813, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.498265] env[65503]: WARNING neutronclient.v2_0.client [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 756.498958] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 756.499313] env[65503]: WARNING openstack [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 756.582688] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.060s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.583332] env[65503]: DEBUG nova.compute.manager [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 756.586254] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.576s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.586586] env[65503]: DEBUG nova.objects.instance [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lazy-loading 'resources' on Instance uuid 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 756.641626] env[65503]: DEBUG oslo_vmware.api [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Task: {'id': task-4449812, 'name': ReconfigVM_Task, 'duration_secs': 0.246541} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.641920] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Reconfigured VM instance instance-0000000a to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 756.646654] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7156844a-b39c-4e6d-8cb2-e9ca1389eaa5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.663176] env[65503]: DEBUG oslo_vmware.api [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Waiting for the task: (returnval){ [ 756.663176] env[65503]: value = "task-4449814" [ 756.663176] env[65503]: _type = "Task" [ 756.663176] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.671993] env[65503]: DEBUG oslo_vmware.api [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Task: {'id': task-4449814, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.702583] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Releasing lock "refresh_cache-585a3d16-ee0c-4b71-9c0d-17b4bc968d09" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.703337] env[65503]: DEBUG nova.compute.manager [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Instance network_info: |[{"id": "2a598d20-dc23-42a4-9d99-962df4c4d391", "address": "fa:16:3e:f7:60:54", "network": {"id": "d4cdc216-2fcb-4281-8227-0887797358f6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-21705326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd6e7f469b7d408fae0621171c096f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a598d20-dc", "ovs_interfaceid": "2a598d20-dc23-42a4-9d99-962df4c4d391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2035}} [ 756.703595] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:60:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a598d20-dc23-42a4-9d99-962df4c4d391', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.711019] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Creating folder: Project (fd6e7f469b7d408fae0621171c096f0a). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.711337] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02497050-9a68-4cf5-af50-8a3b79f9b640 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.725790] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Created folder: Project (fd6e7f469b7d408fae0621171c096f0a) in parent group-v870190. [ 756.725790] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Creating folder: Instances. Parent ref: group-v870321. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.725790] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90b6397c-8a32-4eb0-8a86-9d92931a21eb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.737208] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Created folder: Instances in parent group-v870321. [ 756.737467] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 756.737664] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 756.737885] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-adea33ad-ee10-4fa5-9289-203e8351af25 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.759011] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquiring lock "8f0bf665-b21b-42ed-816d-69dee2f40654" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.759276] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Lock "8f0bf665-b21b-42ed-816d-69dee2f40654" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.759476] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquiring lock "8f0bf665-b21b-42ed-816d-69dee2f40654-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.759647] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Lock "8f0bf665-b21b-42ed-816d-69dee2f40654-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.759803] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Lock "8f0bf665-b21b-42ed-816d-69dee2f40654-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.761413] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.761413] env[65503]: value = "task-4449817" [ 756.761413] env[65503]: _type = "Task" [ 756.761413] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.761900] env[65503]: INFO nova.compute.manager [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Terminating instance [ 756.773110] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449817, 'name': CreateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.815177] env[65503]: DEBUG nova.network.neutron [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Successfully updated port: 4fd37874-1199-4205-b01b-28c18959441c {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 756.900395] env[65503]: DEBUG oslo_concurrency.lockutils [None req-79ff2042-23bb-40ab-b6eb-973ec1849690 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.839s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.933793] env[65503]: DEBUG nova.network.neutron [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Updating instance_info_cache with network_info: [{"id": "59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc", "address": "fa:16:3e:e2:e0:0e", "network": {"id": "e5ccce0e-9a11-4b6a-a9ed-0c36c1f9468b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1915179751", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bb5f92-c2", "ovs_interfaceid": "59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e646f6df-8d32-4092-a47b-63baf184da39", "address": "fa:16:3e:26:dc:60", "network": {"id": "b82fac09-f045-4ef3-b39e-bc084a6b0ecd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-949924037", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape646f6df-8d", "ovs_interfaceid": "e646f6df-8d32-4092-a47b-63baf184da39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 756.958269] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449813, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.090907] env[65503]: DEBUG nova.compute.utils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 757.097139] env[65503]: DEBUG nova.compute.manager [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 757.097546] env[65503]: DEBUG nova.network.neutron [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 757.097691] env[65503]: WARNING neutronclient.v2_0.client [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 757.098365] env[65503]: WARNING neutronclient.v2_0.client [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 757.099056] env[65503]: WARNING openstack [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 757.099348] env[65503]: WARNING openstack [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 757.176687] env[65503]: DEBUG oslo_vmware.api [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Task: {'id': task-4449814, 'name': ReconfigVM_Task, 'duration_secs': 0.254991} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.176988] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870312', 'volume_id': 'b697b167-e710-40c5-8992-c508c36ac340', 'name': 'volume-b697b167-e710-40c5-8992-c508c36ac340', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db942a2d-671b-4036-a80b-d2375145cd29', 'attached_at': '', 'detached_at': '', 'volume_id': 'b697b167-e710-40c5-8992-c508c36ac340', 'serial': 'b697b167-e710-40c5-8992-c508c36ac340'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 757.204080] env[65503]: DEBUG nova.policy [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ecd3784a4a541c3907979c4dab0ac20', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c2254fd86b74662975d3ad1fa4b0f74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 757.271058] env[65503]: DEBUG nova.compute.manager [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 757.271393] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 757.276583] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bf0cdf-a450-4253-9123-926c72b0b56a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.278799] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449817, 'name': CreateVM_Task, 'duration_secs': 0.399416} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.282070] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 757.282788] env[65503]: WARNING neutronclient.v2_0.client [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 757.283261] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.283428] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.283741] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 757.284027] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b36d3076-5553-463f-9e53-19f332620b18 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.290422] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 757.291098] env[65503]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c215eb7-714b-4923-b4dd-b847f021a04c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.297235] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 757.297235] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d30ea6-1b22-4b90-ec31-09937ff31a55" [ 757.297235] env[65503]: _type = "Task" [ 757.297235] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.299119] env[65503]: DEBUG oslo_vmware.api [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for the task: (returnval){ [ 757.299119] env[65503]: value = "task-4449818" [ 757.299119] env[65503]: _type = "Task" [ 757.299119] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.321467] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquiring lock "refresh_cache-5cefb589-9947-4fc1-89b4-d888f8c8f644" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.321714] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquired lock "refresh_cache-5cefb589-9947-4fc1-89b4-d888f8c8f644" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.321899] env[65503]: DEBUG nova.network.neutron [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 757.327056] env[65503]: DEBUG oslo_vmware.api [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449818, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.327349] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d30ea6-1b22-4b90-ec31-09937ff31a55, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.438014] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Releasing lock "refresh_cache-f8d61ded-ddf7-4ec9-88e7-92ffb6934733" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.438626] env[65503]: DEBUG nova.compute.manager [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Instance network_info: |[{"id": "59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc", "address": "fa:16:3e:e2:e0:0e", "network": {"id": "e5ccce0e-9a11-4b6a-a9ed-0c36c1f9468b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1915179751", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bb5f92-c2", "ovs_interfaceid": "59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e646f6df-8d32-4092-a47b-63baf184da39", "address": "fa:16:3e:26:dc:60", "network": {"id": "b82fac09-f045-4ef3-b39e-bc084a6b0ecd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-949924037", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape646f6df-8d", "ovs_interfaceid": "e646f6df-8d32-4092-a47b-63baf184da39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 757.439147] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Acquired lock "refresh_cache-f8d61ded-ddf7-4ec9-88e7-92ffb6934733" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.439367] env[65503]: DEBUG nova.network.neutron 
[req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Refreshing network info cache for port 59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 757.442463] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:e0:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:dc:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e646f6df-8d32-4092-a47b-63baf184da39', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 757.459415] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 757.462840] env[65503]: WARNING neutronclient.v2_0.client [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 757.463145] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 757.464541] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 757.478187] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 757.482307] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd478503-6635-489c-b3d4-fce5892b8ad2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.516484] env[65503]: DEBUG oslo_vmware.api [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449813, 'name': PowerOnVM_Task, 'duration_secs': 0.677184} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.518635] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 757.518929] env[65503]: INFO nova.compute.manager [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Took 10.05 seconds to spawn the instance on the hypervisor. [ 757.519155] env[65503]: DEBUG nova.compute.manager [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 757.519501] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 757.519501] env[65503]: value = "task-4449819" [ 757.519501] env[65503]: _type = "Task" [ 757.519501] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.520600] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2e5068-d688-4f17-8524-d59d33fca0d2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.543893] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449819, 'name': CreateVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.556809] env[65503]: DEBUG nova.network.neutron [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Successfully created port: d53b3aea-2563-4099-a577-bb623eb1e83e {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 757.598901] env[65503]: DEBUG nova.compute.manager [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 757.753296] env[65503]: DEBUG nova.objects.instance [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Lazy-loading 'flavor' on Instance uuid db942a2d-671b-4036-a80b-d2375145cd29 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 757.775995] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062c7264-987d-4acd-9daa-f0790b1f0d32 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.785857] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576acc03-1983-4a63-83ec-ebc0ac964687 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.828527] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276752e4-317f-480a-8e21-3e492fb70bcb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.831587] env[65503]: WARNING openstack [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 757.831844] env[65503]: WARNING openstack [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 757.846385] env[65503]: DEBUG oslo_vmware.api [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449818, 'name': PowerOffVM_Task, 'duration_secs': 0.520354} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.851778] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 757.852346] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 757.852646] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d30ea6-1b22-4b90-ec31-09937ff31a55, 'name': SearchDatastore_Task, 'duration_secs': 0.021806} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.852876] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e92a973-5f34-4a04-944c-153147a442eb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.855731] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c26ae2-2cad-404d-9c63-ea21aac01c8e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.859882] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.860131] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.860341] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.860473] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.862301] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.862301] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11fdb655-a0eb-480d-984c-681d5ab51bad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.879994] env[65503]: DEBUG nova.compute.provider_tree [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.882491] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.882491] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.883482] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18938068-2924-4c35-b2ec-22b4418b6d2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.891971] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 757.891971] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5235e5e1-63ef-4202-79ec-ec88e859c018" [ 757.891971] env[65503]: _type = "Task" [ 757.891971] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.902722] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5235e5e1-63ef-4202-79ec-ec88e859c018, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.933996] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 757.934226] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 757.934474] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Deleting the datastore file [datastore1] 8f0bf665-b21b-42ed-816d-69dee2f40654 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 757.934810] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-522a420e-6c69-4afb-b064-2ef4ca11917b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.943321] env[65503]: DEBUG oslo_vmware.api [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for the task: (returnval){ [ 757.943321] env[65503]: value = "task-4449821" [ 757.943321] env[65503]: _type = "Task" [ 757.943321] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.954142] env[65503]: DEBUG oslo_vmware.api [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449821, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.020555] env[65503]: DEBUG nova.network.neutron [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 758.035538] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449819, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.054742] env[65503]: INFO nova.compute.manager [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Took 15.48 seconds to build instance. 
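
The interleaved "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" entries above are oslo.vmware's task-polling pattern: the driver starts an asynchronous vCenter task and then blocks in wait_for_task() while _poll_task() reports progress. A minimal sketch of that pattern (not the driver's actual code), assuming hypothetical vCenter credentials and a datacenter reference looked up elsewhere:

    from oslo_vmware import api as vmware_api

    # Hypothetical connection details; any logged-in VMwareAPISession will do.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    dc_ref = ...  # hypothetical: Datacenter managed-object reference

    # Start an asynchronous vCenter task, here the DeleteDatastoreFile_Task
    # shape seen in the log ...
    file_mgr = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name='[datastore1] 8f0bf665-b21b-42ed-816d-69dee2f40654',
        datacenter=dc_ref)

    # ... then block until it finishes. wait_for_task() polls the task object,
    # which is what produces the "progress is N%" lines, and raises if vCenter
    # reports the task as failed.
    session.wait_for_task(task)
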
[ 758.384677] env[65503]: DEBUG nova.scheduler.client.report [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 758.412142] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5235e5e1-63ef-4202-79ec-ec88e859c018, 'name': SearchDatastore_Task, 'duration_secs': 0.016815} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.413987] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25fd754a-4ec1-4e57-b6c6-c81575105343 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.422710] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 758.422710] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527aaba1-19ae-f533-b3e2-955e97fdb803" [ 758.422710] env[65503]: _type = "Task" [ 758.422710] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.436462] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527aaba1-19ae-f533-b3e2-955e97fdb803, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.457019] env[65503]: DEBUG oslo_vmware.api [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Task: {'id': task-4449821, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235917} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.457313] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 758.457513] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 758.457702] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 758.457876] env[65503]: INFO nova.compute.manager [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Took 1.19 seconds to destroy the instance on the hypervisor. [ 758.458133] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 758.458422] env[65503]: DEBUG nova.compute.manager [-] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 758.458531] env[65503]: DEBUG nova.network.neutron [-] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 758.458914] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
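
The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network ... to return" entry above comes from oslo.service's RetryDecorator, which Nova wraps around network deallocation so transient failures are retried with growing sleeps. A minimal sketch of that decorator, with a made-up retry budget and exception type:

    from oslo_service import loopingcall

    # Retry budget and exception type are illustrative assumptions only.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=30, exceptions=(RuntimeError,))
    def deallocate_network_with_retries():
        # Work that may fail transiently; RetryDecorator re-invokes it inside a
        # looping call and logs "Waiting for function ... to return." while it
        # waits for the final result.
        pass

    deallocate_network_with_retries()
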
[ 758.459418] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 758.459749] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 758.474689] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.475916] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.475916] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.475916] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.475916] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.479650] env[65503]: INFO nova.compute.manager [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Terminating instance [ 758.544317] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449819, 'name': CreateVM_Task, 'duration_secs': 0.593206} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.544629] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 758.545445] env[65503]: WARNING neutronclient.v2_0.client [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 758.545947] env[65503]: WARNING neutronclient.v2_0.client [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 758.546444] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.546671] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.547138] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 758.547533] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b913ff6-6e33-4264-92c2-28176cb4e464 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.554597] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 758.554597] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e3e02f-7ed5-0a03-2b0d-c29e8ede2bb0" [ 758.554597] env[65503]: _type = "Task" [ 758.554597] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.558546] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ca0005e1-8c21-4eb9-a5da-2727fbf0845e tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.993s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.567415] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e3e02f-7ed5-0a03-2b0d-c29e8ede2bb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.610762] env[65503]: DEBUG nova.compute.manager [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 758.640021] env[65503]: DEBUG nova.virt.hardware [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 758.641063] env[65503]: DEBUG nova.virt.hardware [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 758.641063] env[65503]: DEBUG nova.virt.hardware [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 758.641063] env[65503]: DEBUG nova.virt.hardware [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 758.641246] env[65503]: DEBUG nova.virt.hardware [None 
req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 758.641348] env[65503]: DEBUG nova.virt.hardware [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 758.641529] env[65503]: DEBUG nova.virt.hardware [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 758.641734] env[65503]: DEBUG nova.virt.hardware [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 758.641986] env[65503]: DEBUG nova.virt.hardware [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 758.642663] env[65503]: DEBUG nova.virt.hardware [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 758.642897] env[65503]: DEBUG nova.virt.hardware [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 758.643825] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a96d165-061a-407e-a14f-d4b015c389cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.654751] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd305f57-2ea9-4248-9cbc-bbe344081bc2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.760671] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4fff8f2b-9636-40c7-8d46-1c7dc7b777de tempest-VolumesAssistedSnapshotsTest-1107869941 tempest-VolumesAssistedSnapshotsTest-1107869941-project-admin] Lock "db942a2d-671b-4036-a80b-d2375145cd29" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.300s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.890145] env[65503]: DEBUG oslo_concurrency.lockutils [None 
req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.304s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.892859] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.686s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.893035] env[65503]: DEBUG nova.objects.instance [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lazy-loading 'resources' on Instance uuid eec6a484-ab00-402e-a369-c3009065c553 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 758.921171] env[65503]: INFO nova.scheduler.client.report [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted allocations for instance 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1 [ 758.941550] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527aaba1-19ae-f533-b3e2-955e97fdb803, 'name': SearchDatastore_Task, 'duration_secs': 0.021957} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.941771] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.941878] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 585a3d16-ee0c-4b71-9c0d-17b4bc968d09/585a3d16-ee0c-4b71-9c0d-17b4bc968d09.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 758.942164] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-649c008f-6f45-43e9-adb2-8930c822b32f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.950837] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 758.950837] env[65503]: value = "task-4449822" [ 758.950837] env[65503]: _type = "Task" [ 758.950837] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.964370] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449822, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.983588] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 758.984069] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 758.996696] env[65503]: DEBUG nova.compute.manager [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 758.996986] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 758.997713] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0817814d-ed9d-4ba1-9da7-7ac5ec59de20 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.006970] env[65503]: WARNING openstack [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 759.007688] env[65503]: WARNING openstack [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 759.018466] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 759.018466] env[65503]: value = "task-4449823" [ 759.018466] env[65503]: _type = "Task" [ 759.018466] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.029328] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449823, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.067763] env[65503]: DEBUG nova.compute.manager [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 759.078281] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e3e02f-7ed5-0a03-2b0d-c29e8ede2bb0, 'name': SearchDatastore_Task, 'duration_secs': 0.026461} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.078633] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.078898] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 759.079507] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.079507] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.079507] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 759.080390] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-430a7f3d-d6e6-412b-a61b-73d3221d5299 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.090811] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 759.090996] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 759.092119] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06a5b2fe-8035-48f1-9392-b17fb511f703 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.102037] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 759.102037] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529aeb02-53b8-a729-b923-709503ed0c7b" [ 759.102037] env[65503]: _type = "Task" [ 759.102037] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.113159] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529aeb02-53b8-a729-b923-709503ed0c7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.134190] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 759.395179] env[65503]: DEBUG nova.network.neutron [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Successfully updated port: d53b3aea-2563-4099-a577-bb623eb1e83e {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 759.440457] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ee5981f7-42de-41eb-851e-756062ef5bc5 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.238s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.446200] env[65503]: WARNING neutronclient.v2_0.client [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
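
The "Acquiring lock ... by ..." / "Lock ... "released" by ... :: held N.NNNs" pairs in this section are emitted by oslo.concurrency's synchronized decorator, while the plainer Acquiring/Acquired/Releasing lines come from its lock() context manager; both serialize work on a shared name such as an instance UUID or an image-cache path. A minimal sketch of the two forms, using names taken from the log (the 'nova-' lock-file prefix is an assumption):

    from oslo_concurrency import lockutils

    # Decorator form: produces the "acquired by"/"released by" pairs with
    # waited/held timings (lockutils.py:405/410/424 in the log).
    @lockutils.synchronized('9acbc312-a3a2-4758-87cd-5576c4f1f8dc', 'nova-')
    def do_terminate_instance():
        pass  # critical section: one caller per lock name at a time

    # Context-manager form: produces the plain Acquiring/Acquired/Releasing
    # lines (lockutils.py:313/316/334), e.g. around the cached image directory.
    with lockutils.lock('[datastore2] devstack-image-cache_base/'
                        'd68ffece-ab91-4610-b535-fa1fb25ade93'):
        pass  # e.g. check or populate the image cache entry

    do_terminate_instance()
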
[ 759.447635] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 759.447635] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 759.463843] env[65503]: WARNING neutronclient.v2_0.client [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 759.463843] env[65503]: WARNING openstack [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 759.463843] env[65503]: WARNING openstack [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 759.480608] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449822, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.552756] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449823, 'name': PowerOffVM_Task, 'duration_secs': 0.383011} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.555535] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 759.555753] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Volume detach. 
Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 759.555948] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870317', 'volume_id': 'abf7c28d-f540-4663-b4f2-d36b819413f6', 'name': 'volume-abf7c28d-f540-4663-b4f2-d36b819413f6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9acbc312-a3a2-4758-87cd-5576c4f1f8dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'abf7c28d-f540-4663-b4f2-d36b819413f6', 'serial': 'abf7c28d-f540-4663-b4f2-d36b819413f6'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 759.564477] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fc2591-8174-4e3e-9ba4-c5baefa3ea9b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.600485] env[65503]: DEBUG nova.network.neutron [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Updated VIF entry in instance network info cache for port 59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 759.600809] env[65503]: DEBUG nova.network.neutron [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Updating instance_info_cache with network_info: [{"id": "59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc", "address": "fa:16:3e:e2:e0:0e", "network": {"id": "e5ccce0e-9a11-4b6a-a9ed-0c36c1f9468b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1915179751", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bb5f92-c2", "ovs_interfaceid": "59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e646f6df-8d32-4092-a47b-63baf184da39", "address": "fa:16:3e:26:dc:60", "network": {"id": "b82fac09-f045-4ef3-b39e-bc084a6b0ecd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-949924037", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape646f6df-8d", "ovs_interfaceid": "e646f6df-8d32-4092-a47b-63baf184da39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 759.607194] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be826595-704a-47aa-ba13-cb2e7b22e31a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.623384] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c36cd5-bfba-4548-b6ca-cb21b9a56538 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.625674] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529aeb02-53b8-a729-b923-709503ed0c7b, 'name': SearchDatastore_Task, 'duration_secs': 0.011794} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.626617] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.630539] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad0c558c-1dc9-4163-b0ef-2a55eeb7cd47 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.654730] env[65503]: DEBUG nova.network.neutron [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Updating instance_info_cache with network_info: [{"id": "4fd37874-1199-4205-b01b-28c18959441c", "address": "fa:16:3e:45:08:74", "network": {"id": "670fed0b-9502-4fc3-8a27-8458398be406", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1967452757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "bca4e8b44ecb4c24803c35c24892bdc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": 
"nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd37874-11", "ovs_interfaceid": "4fd37874-1199-4205-b01b-28c18959441c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 759.659305] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fbf29e-48fa-4dbe-b9ce-59847d7a2675 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.664348] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 759.664348] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52316b3c-19a8-c122-4b22-52bae766b9ba" [ 759.664348] env[65503]: _type = "Task" [ 759.664348] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.680429] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The volume has not been displaced from its original location: [datastore2] volume-abf7c28d-f540-4663-b4f2-d36b819413f6/volume-abf7c28d-f540-4663-b4f2-d36b819413f6.vmdk. No consolidation needed. {{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 759.686312] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Reconfiguring VM instance instance-00000025 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 759.690083] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6582642-f2e6-47b2-b723-9a92ff2ad543 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.708780] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52316b3c-19a8-c122-4b22-52bae766b9ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.715501] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 759.715501] env[65503]: value = "task-4449824" [ 759.715501] env[65503]: _type = "Task" [ 759.715501] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.727930] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449824, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.734157] env[65503]: DEBUG nova.compute.manager [req-dfe3ff12-11e5-4074-88ea-2c56e285c91a req-2300b4d9-82d5-48ca-96c0-4027b7189457 service nova] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Received event network-vif-deleted-2da53c45-1677-47a0-99c7-20ed6ebfc0ad {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 759.764392] env[65503]: DEBUG nova.compute.manager [req-3721be44-bd6f-4961-bdac-dac8646536fe req-75e0de2c-d26f-46d3-bb7e-60dbabcfd34d service nova] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Received event network-vif-plugged-4fd37874-1199-4205-b01b-28c18959441c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 759.764688] env[65503]: DEBUG oslo_concurrency.lockutils [req-3721be44-bd6f-4961-bdac-dac8646536fe req-75e0de2c-d26f-46d3-bb7e-60dbabcfd34d service nova] Acquiring lock "5cefb589-9947-4fc1-89b4-d888f8c8f644-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.764903] env[65503]: DEBUG oslo_concurrency.lockutils [req-3721be44-bd6f-4961-bdac-dac8646536fe req-75e0de2c-d26f-46d3-bb7e-60dbabcfd34d service nova] Lock "5cefb589-9947-4fc1-89b4-d888f8c8f644-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.765268] env[65503]: DEBUG oslo_concurrency.lockutils [req-3721be44-bd6f-4961-bdac-dac8646536fe req-75e0de2c-d26f-46d3-bb7e-60dbabcfd34d service nova] Lock "5cefb589-9947-4fc1-89b4-d888f8c8f644-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.765464] env[65503]: DEBUG nova.compute.manager [req-3721be44-bd6f-4961-bdac-dac8646536fe req-75e0de2c-d26f-46d3-bb7e-60dbabcfd34d service nova] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] No waiting events found dispatching network-vif-plugged-4fd37874-1199-4205-b01b-28c18959441c {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 759.765680] env[65503]: WARNING nova.compute.manager [req-3721be44-bd6f-4961-bdac-dac8646536fe req-75e0de2c-d26f-46d3-bb7e-60dbabcfd34d service nova] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Received unexpected event network-vif-plugged-4fd37874-1199-4205-b01b-28c18959441c for instance with vm_state building and task_state spawning. 
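
"Reconfiguring VM instance instance-00000025 to detach disk 2001" followed by the ReconfigVM_Task polling corresponds to a VirtualMachineConfigSpec whose deviceChange removes the volume's VirtualDisk from the VM without touching the backing file. A minimal sketch of that spec, with hypothetical session, vm_ref and device placeholders:

    # Hypothetical handles, assumed to have been looked up already.
    session = ...  # a logged-in oslo_vmware.api.VMwareAPISession
    vm_ref = ...   # VirtualMachine moref for instance 9acbc312-a3a2-4758-87cd-5576c4f1f8dc
    device = ...   # the VirtualDisk backing volume abf7c28d-f540-4663-b4f2-d36b819413f6

    factory = session.vim.client.factory
    change = factory.create('ns0:VirtualDeviceConfigSpec')
    # 'remove' with no fileOperation detaches the disk but leaves the backing
    # VMDK on the datastore (the volume itself still exists in Cinder).
    change.operation = 'remove'
    change.device = device

    spec = factory.create('ns0:VirtualMachineConfigSpec')
    spec.deviceChange = [change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
    session.wait_for_task(task)  # yields the "ReconfigVM_Task progress ..." lines
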
[ 759.904529] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.904705] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquired lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.904877] env[65503]: DEBUG nova.network.neutron [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 759.967240] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449822, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598046} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.967629] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 585a3d16-ee0c-4b71-9c0d-17b4bc968d09/585a3d16-ee0c-4b71-9c0d-17b4bc968d09.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 759.967991] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 759.971661] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81fba816-6565-4b70-ae6b-e19b2377795a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.982729] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 759.982729] env[65503]: value = "task-4449826" [ 759.982729] env[65503]: _type = "Task" [ 759.982729] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.985833] env[65503]: DEBUG nova.network.neutron [-] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 759.995920] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449826, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.112159] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Releasing lock "refresh_cache-f8d61ded-ddf7-4ec9-88e7-92ffb6934733" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.112441] env[65503]: DEBUG nova.compute.manager [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Received event network-vif-plugged-2a598d20-dc23-42a4-9d99-962df4c4d391 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 760.112677] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Acquiring lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.112890] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.113065] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.113233] env[65503]: DEBUG nova.compute.manager [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] No waiting events found dispatching network-vif-plugged-2a598d20-dc23-42a4-9d99-962df4c4d391 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 760.113636] env[65503]: WARNING nova.compute.manager [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Received unexpected event network-vif-plugged-2a598d20-dc23-42a4-9d99-962df4c4d391 for instance with vm_state building and task_state spawning. 
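
The CopyVirtualDisk_Task that just completed and the "Extending root virtual disk to 1048576" step are both VirtualDiskManager calls: the cached image VMDK is copied into the instance directory and then grown to the flavor's root-disk size (1048576 KB, i.e. the 1 GiB root disk of m1.nano). A minimal sketch of the pair, with the session and datacenter reference assumed:

    # Hypothetical handles, assumed to have been set up already.
    session = ...  # a logged-in oslo_vmware.api.VMwareAPISession
    dc_ref = ...   # Datacenter managed-object reference

    disk_mgr = session.vim.service_content.virtualDiskManager
    src = ('[datastore2] devstack-image-cache_base/'
           'd68ffece-ab91-4610-b535-fa1fb25ade93/'
           'd68ffece-ab91-4610-b535-fa1fb25ade93.vmdk')
    dst = ('[datastore2] 585a3d16-ee0c-4b71-9c0d-17b4bc968d09/'
           '585a3d16-ee0c-4b71-9c0d-17b4bc968d09.vmdk')

    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=src, sourceDatacenter=dc_ref, destName=dst)
    session.wait_for_task(copy_task)

    # Grow the copied root disk to the flavor size (value in KB).
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=dst, datacenter=dc_ref, newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)
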
[ 760.113636] env[65503]: DEBUG nova.compute.manager [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Received event network-changed-2a598d20-dc23-42a4-9d99-962df4c4d391 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 760.113810] env[65503]: DEBUG nova.compute.manager [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Refreshing instance network info cache due to event network-changed-2a598d20-dc23-42a4-9d99-962df4c4d391. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 760.114108] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Acquiring lock "refresh_cache-585a3d16-ee0c-4b71-9c0d-17b4bc968d09" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.114288] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Acquired lock "refresh_cache-585a3d16-ee0c-4b71-9c0d-17b4bc968d09" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.114476] env[65503]: DEBUG nova.network.neutron [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Refreshing network info cache for port 2a598d20-dc23-42a4-9d99-962df4c4d391 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 760.116308] env[65503]: DEBUG oslo_concurrency.lockutils [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] Acquired lock "refresh_cache-f8d61ded-ddf7-4ec9-88e7-92ffb6934733" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.116410] env[65503]: DEBUG nova.network.neutron [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Refreshing network info cache for port e646f6df-8d32-4092-a47b-63baf184da39 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 760.125262] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb8226b-d136-42ce-b089-7729ffa7f5e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.137478] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc5846e-d426-49fe-9686-c1cbd5c95fcf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.170948] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Releasing lock "refresh_cache-5cefb589-9947-4fc1-89b4-d888f8c8f644" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.171353] env[65503]: DEBUG nova.compute.manager [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 
tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Instance network_info: |[{"id": "4fd37874-1199-4205-b01b-28c18959441c", "address": "fa:16:3e:45:08:74", "network": {"id": "670fed0b-9502-4fc3-8a27-8458398be406", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1967452757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "bca4e8b44ecb4c24803c35c24892bdc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd37874-11", "ovs_interfaceid": "4fd37874-1199-4205-b01b-28c18959441c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 760.172244] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:08:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4fd37874-1199-4205-b01b-28c18959441c', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 760.179502] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Creating folder: Project (bca4e8b44ecb4c24803c35c24892bdc3). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 760.183492] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb06c93f-a448-4799-84ef-72b52c5bf812 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.187461] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a2cf278-b128-4252-b330-98dd9e487693 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.199628] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f84d82-4102-43f3-bb25-b2fb9494568c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.203962] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52316b3c-19a8-c122-4b22-52bae766b9ba, 'name': SearchDatastore_Task, 'duration_secs': 0.05325} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.206159] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.206551] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f8d61ded-ddf7-4ec9-88e7-92ffb6934733/f8d61ded-ddf7-4ec9-88e7-92ffb6934733.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 760.206735] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Created folder: Project (bca4e8b44ecb4c24803c35c24892bdc3) in parent group-v870190. [ 760.206899] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Creating folder: Instances. Parent ref: group-v870325. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 760.207610] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7d17c76-1bad-4399-9cf0-7720da242ad0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.211163] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2714189-4120-4dcf-8e62-a4533970b7dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.221474] env[65503]: DEBUG nova.compute.provider_tree [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.229606] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 760.229606] env[65503]: value = "task-4449829" [ 760.229606] env[65503]: _type = "Task" [ 760.229606] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.234025] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449824, 'name': ReconfigVM_Task, 'duration_secs': 0.227552} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.237962] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Reconfigured VM instance instance-00000025 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 760.242798] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Created folder: Instances in parent group-v870325. [ 760.243087] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 760.243663] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44e439ea-7d74-4b6b-b5eb-baa3e46dc24b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.254728] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 760.256137] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff24bf9b-42fc-457e-b222-81830828d5a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.275343] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449829, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.281626] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 760.281626] env[65503]: value = "task-4449830" [ 760.281626] env[65503]: _type = "Task" [ 760.281626] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.282281] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 760.282281] env[65503]: value = "task-4449831" [ 760.282281] env[65503]: _type = "Task" [ 760.282281] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.297787] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449830, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.301889] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449831, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.409996] env[65503]: WARNING openstack [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 760.411604] env[65503]: WARNING openstack [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 760.456862] env[65503]: DEBUG nova.network.neutron [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 760.489643] env[65503]: INFO nova.compute.manager [-] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Took 2.03 seconds to deallocate network for instance. [ 760.494335] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449826, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080551} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.497951] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 760.499201] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d08229-37ce-493a-be6a-64d6150dd6cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.529285] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 585a3d16-ee0c-4b71-9c0d-17b4bc968d09/585a3d16-ee0c-4b71-9c0d-17b4bc968d09.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 760.531025] env[65503]: WARNING openstack [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 760.531926] env[65503]: WARNING openstack [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 760.541804] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfa7f07f-d0a8-4df4-be6b-790e370c24f9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.570655] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 760.570655] env[65503]: value = "task-4449832" [ 760.570655] env[65503]: _type = "Task" [ 760.570655] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.584824] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449832, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.619652] env[65503]: WARNING neutronclient.v2_0.client [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 760.620402] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 760.621884] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 760.629191] env[65503]: WARNING neutronclient.v2_0.client [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 760.629969] env[65503]: WARNING openstack [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 760.630342] env[65503]: WARNING openstack [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 760.642931] env[65503]: WARNING neutronclient.v2_0.client [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
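The repeated "Waiting for the task", "progress is N%", and "completed successfully" entries around this point come from oslo.vmware's task polling against vCenter. The loop below is a simplified sketch of that polling pattern under assumed names; get_task_info() and the dictionary shape it returns are illustrative, not the oslo.vmware interface.

    import time

    # Hypothetical sketch of a vCenter task poll loop; get_task_info() and its
    # returned fields are assumptions, not oslo.vmware's real API.
    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        while True:
            info = get_task_info(task_ref)  # e.g. {'state': ..., 'progress': ...}
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise RuntimeError("task %s failed: %s"
                                   % (task_ref, info.get("error")))
            # 'queued' or 'running': report progress and try again,
            # as in the "progress is N%" entries above.
            time.sleep(poll_interval)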
[ 760.643235] env[65503]: WARNING openstack [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 760.643647] env[65503]: WARNING openstack [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 760.730623] env[65503]: DEBUG nova.scheduler.client.report [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.746223] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449829, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.804986] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449831, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.808840] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449830, 'name': ReconfigVM_Task, 'duration_secs': 0.334775} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.809284] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870317', 'volume_id': 'abf7c28d-f540-4663-b4f2-d36b819413f6', 'name': 'volume-abf7c28d-f540-4663-b4f2-d36b819413f6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9acbc312-a3a2-4758-87cd-5576c4f1f8dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'abf7c28d-f540-4663-b4f2-d36b819413f6', 'serial': 'abf7c28d-f540-4663-b4f2-d36b819413f6'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 760.809601] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 760.811745] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 760.811919] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 760.820403] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513ffa56-ddab-4960-83ca-f64fd43a407e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.827456] env[65503]: WARNING openstack [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 760.827971] env[65503]: WARNING openstack [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 760.841687] env[65503]: DEBUG nova.network.neutron [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Updating instance_info_cache with 
network_info: [{"id": "d53b3aea-2563-4099-a577-bb623eb1e83e", "address": "fa:16:3e:ea:8f:8c", "network": {"id": "9b0ccd37-b92a-4bb9-bd80-6ac034d3105c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2254fd86b74662975d3ad1fa4b0f74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd53b3aea-25", "ovs_interfaceid": "d53b3aea-2563-4099-a577-bb623eb1e83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 760.843101] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 760.843796] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-caa9b760-cce3-48f3-afc6-7493b75043e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.887047] env[65503]: WARNING neutronclient.v2_0.client [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 760.887047] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 760.887047] env[65503]: WARNING openstack [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 760.907823] env[65503]: WARNING neutronclient.v2_0.client [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
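The instance_info_cache updates above carry the full Neutron VIF model (port id, MAC address, subnets and fixed IPs, plus the NSX switch details). The snippet below pulls out the commonly inspected fields from one such entry; the vif literal is abridged from the log entry above, and summarize_vif() is an illustrative helper, not Nova code.

    # Abridged from the network_info entry logged above; helper is illustrative.
    vif = {
        "id": "d53b3aea-2563-4099-a577-bb623eb1e83e",
        "address": "fa:16:3e:ea:8f:8c",
        "network": {
            "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network",
            "subnets": [{"cidr": "192.168.128.0/28",
                         "ips": [{"address": "192.168.128.9", "type": "fixed"}]}],
        },
        "details": {"nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337",
                    "segmentation_id": 309},
        "devname": "tapd53b3aea-25",
    }


    def summarize_vif(vif):
        # Collect the fields most often needed when reading these logs:
        # port id, MAC, fixed IPs, tap device name, and the NSX segment.
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        return {"port_id": vif["id"], "mac": vif["address"], "fixed_ips": ips,
                "devname": vif["devname"],
                "segmentation_id": vif["details"]["segmentation_id"]}


    print(summarize_vif(vif))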
[ 760.907823] env[65503]: WARNING openstack [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 760.908232] env[65503]: WARNING openstack [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 760.919056] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 760.919056] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 760.919056] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleting the datastore file [datastore1] 9acbc312-a3a2-4758-87cd-5576c4f1f8dc {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 760.919467] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a2c79b2-6e27-4448-b792-afc595d81965 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.927423] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 760.927423] env[65503]: value = "task-4449834" [ 760.927423] env[65503]: _type = "Task" [ 760.927423] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.937738] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449834, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.004879] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.031758] env[65503]: DEBUG nova.network.neutron [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Updated VIF entry in instance network info cache for port 2a598d20-dc23-42a4-9d99-962df4c4d391. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 761.032366] env[65503]: DEBUG nova.network.neutron [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Updating instance_info_cache with network_info: [{"id": "2a598d20-dc23-42a4-9d99-962df4c4d391", "address": "fa:16:3e:f7:60:54", "network": {"id": "d4cdc216-2fcb-4281-8227-0887797358f6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-21705326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd6e7f469b7d408fae0621171c096f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a598d20-dc", "ovs_interfaceid": "2a598d20-dc23-42a4-9d99-962df4c4d391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 761.072634] env[65503]: DEBUG nova.network.neutron [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Updated VIF entry in instance network info cache for port e646f6df-8d32-4092-a47b-63baf184da39. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 761.072634] env[65503]: DEBUG nova.network.neutron [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Updating instance_info_cache with network_info: [{"id": "59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc", "address": "fa:16:3e:e2:e0:0e", "network": {"id": "e5ccce0e-9a11-4b6a-a9ed-0c36c1f9468b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1915179751", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bb5f92-c2", "ovs_interfaceid": "59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e646f6df-8d32-4092-a47b-63baf184da39", "address": "fa:16:3e:26:dc:60", "network": {"id": "b82fac09-f045-4ef3-b39e-bc084a6b0ecd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-949924037", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae68676f87fd4edc802b2e4b4917ceec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape646f6df-8d", "ovs_interfaceid": "e646f6df-8d32-4092-a47b-63baf184da39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 761.088676] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449832, 'name': ReconfigVM_Task, 'duration_secs': 0.436038} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.089512] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 585a3d16-ee0c-4b71-9c0d-17b4bc968d09/585a3d16-ee0c-4b71-9c0d-17b4bc968d09.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 761.090313] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-675502c8-8f70-4f4b-bfb7-0bce6223527d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.098865] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 761.098865] env[65503]: value = "task-4449835" [ 761.098865] env[65503]: _type = "Task" [ 761.098865] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.109458] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449835, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.241515] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.349s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.244494] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.796s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.246831] env[65503]: INFO nova.compute.claims [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.256037] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449829, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616903} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.256273] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f8d61ded-ddf7-4ec9-88e7-92ffb6934733/f8d61ded-ddf7-4ec9-88e7-92ffb6934733.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 761.256497] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 761.256766] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf755c6a-ed92-43a0-b784-556f1fa16be1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.265718] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 761.265718] env[65503]: value = "task-4449836" [ 761.265718] env[65503]: _type = "Task" [ 761.265718] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.276211] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449836, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.277410] env[65503]: INFO nova.scheduler.client.report [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Deleted allocations for instance eec6a484-ab00-402e-a369-c3009065c553 [ 761.306558] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449831, 'name': CreateVM_Task, 'duration_secs': 0.575208} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.306863] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 761.307569] env[65503]: WARNING neutronclient.v2_0.client [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
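The resource claim above ("Claim successful on node domain-c8...") is checked against the provider inventory logged earlier (VCPU, MEMORY_MB, DISK_GB with reserved amounts and allocation ratios). As a rough illustration of that arithmetic, the usable capacity per resource class is assumed here to be (total - reserved) * allocation_ratio; the helper below is a sketch under that assumption, not the placement implementation.

    # Inventory values copied from the provider record logged above.
    # effective_capacity() illustrates the assumed
    # (total - reserved) * allocation_ratio rule; it is not placement code.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
    }


    def effective_capacity(inv):
        return {rc: int((v["total"] - v["reserved"]) * v["allocation_ratio"])
                for rc, v in inv.items()}


    print(effective_capacity(inventory))
    # e.g. VCPU -> 192, MEMORY_MB -> 196078, DISK_GB -> 200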
[ 761.308091] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.308389] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.308837] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 761.309218] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-449ed202-0602-4916-8dd0-3829f6effb56 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.315957] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for the task: (returnval){ [ 761.315957] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e984c7-80f3-7649-283d-4ece5c677a32" [ 761.315957] env[65503]: _type = "Task" [ 761.315957] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.325753] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e984c7-80f3-7649-283d-4ece5c677a32, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.345119] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Releasing lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.345638] env[65503]: DEBUG nova.compute.manager [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Instance network_info: |[{"id": "d53b3aea-2563-4099-a577-bb623eb1e83e", "address": "fa:16:3e:ea:8f:8c", "network": {"id": "9b0ccd37-b92a-4bb9-bd80-6ac034d3105c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2254fd86b74662975d3ad1fa4b0f74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd53b3aea-25", "ovs_interfaceid": "d53b3aea-2563-4099-a577-bb623eb1e83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 761.346112] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:8f:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7edb7c08-2fae-4df5-9ec6-5ccf06d7e337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd53b3aea-2563-4099-a577-bb623eb1e83e', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 761.354255] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 761.354911] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 761.355127] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9dbd9c58-9725-4c74-bcd0-5e6ed378f238 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.375837] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 761.375837] env[65503]: value = "task-4449837" [ 761.375837] env[65503]: _type = "Task" [ 761.375837] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.439259] env[65503]: DEBUG oslo_vmware.api [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4449834, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157951} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.439573] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 761.439764] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 761.439937] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 761.440125] env[65503]: INFO nova.compute.manager [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Took 2.44 seconds to destroy the instance on the hypervisor. [ 761.440393] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 761.442463] env[65503]: DEBUG nova.compute.manager [-] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 761.442576] env[65503]: DEBUG nova.network.neutron [-] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 761.442839] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 761.443415] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 761.443694] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 761.530935] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 761.535330] env[65503]: DEBUG oslo_concurrency.lockutils [req-34497b3e-5cee-4958-a248-76c3c7d30100 req-ad490335-423b-4cb9-b6e3-88066d95b935 service nova] Releasing lock "refresh_cache-585a3d16-ee0c-4b71-9c0d-17b4bc968d09" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.583433] env[65503]: DEBUG oslo_concurrency.lockutils [req-1d057e74-ddb5-4bc7-995e-89731976fdbb req-13c9b57e-828e-4dd2-a77b-2da807bba934 service nova] Releasing lock "refresh_cache-f8d61ded-ddf7-4ec9-88e7-92ffb6934733" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.588598] env[65503]: DEBUG nova.compute.manager [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Received event network-changed {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 761.588809] env[65503]: DEBUG nova.compute.manager [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Refreshing instance network info cache due to event network-changed. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 761.588993] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] Acquiring lock "refresh_cache-ca5962fe-3e41-4fae-8860-90fa7278e0fc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.589152] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] Acquired lock "refresh_cache-ca5962fe-3e41-4fae-8860-90fa7278e0fc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.589307] env[65503]: DEBUG nova.network.neutron [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 761.617927] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449835, 'name': Rename_Task, 'duration_secs': 0.285008} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.618368] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 761.618592] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dceaa697-6579-4cff-a0ae-661412d08caf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.628386] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 761.628386] env[65503]: value = "task-4449838" [ 761.628386] env[65503]: _type = "Task" [ 761.628386] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.643708] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449838, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.705223] env[65503]: DEBUG oslo_concurrency.lockutils [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquiring lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.705635] env[65503]: DEBUG oslo_concurrency.lockutils [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.004s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.705980] env[65503]: DEBUG oslo_concurrency.lockutils [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquiring lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.706130] env[65503]: DEBUG oslo_concurrency.lockutils [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.706301] env[65503]: DEBUG oslo_concurrency.lockutils [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.711325] env[65503]: INFO nova.compute.manager [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Terminating instance [ 761.779430] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449836, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104261} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.779701] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 761.780649] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4301327-45a7-4551-a7f2-22f1cd4736e2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.814615] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] f8d61ded-ddf7-4ec9-88e7-92ffb6934733/f8d61ded-ddf7-4ec9-88e7-92ffb6934733.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 761.815199] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8266a308-7fae-42db-886a-8ab71e02d6fe tempest-ServersAdminNegativeTestJSON-21398976 tempest-ServersAdminNegativeTestJSON-21398976-project-member] Lock "eec6a484-ab00-402e-a369-c3009065c553" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.500s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.816482] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96bee8d7-f670-4ac3-9b45-da88d3e81951 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.846753] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e984c7-80f3-7649-283d-4ece5c677a32, 'name': SearchDatastore_Task, 'duration_secs': 0.011763} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.849909] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.849909] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.849909] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.849909] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.849909] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.849909] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 761.849909] env[65503]: value = "task-4449839" [ 761.849909] env[65503]: _type = "Task" [ 761.849909] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.849909] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4149828e-f7ec-497b-8ddc-0054926df095 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.862130] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449839, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.864919] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.865103] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 761.866346] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71a71908-fd57-459a-86b2-2b1d5179c728 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.876030] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for the task: (returnval){ [ 761.876030] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52908aed-b3b6-9e45-2c99-2662d5a6e5e8" [ 761.876030] env[65503]: _type = "Task" [ 761.876030] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.894996] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52908aed-b3b6-9e45-2c99-2662d5a6e5e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.895790] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449837, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.024211] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "083797a8-8daf-493b-89de-7ae9137ed538" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.024519] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "083797a8-8daf-493b-89de-7ae9137ed538" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.066062] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.066360] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.097398] env[65503]: WARNING neutronclient.v2_0.client [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 762.097398] env[65503]: WARNING openstack [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 762.097398] env[65503]: WARNING openstack [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 762.145790] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449838, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.216035] env[65503]: DEBUG nova.compute.manager [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 762.216292] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 762.217112] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a395570-faa0-4674-8dd1-6011aa4e7727 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.226327] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 762.226511] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-539168e3-ab84-43ca-94c0-7322133c1c94 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.235105] env[65503]: DEBUG oslo_vmware.api [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for the task: (returnval){ [ 762.235105] env[65503]: value = "task-4449840" [ 762.235105] env[65503]: _type = "Task" [ 762.235105] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.246008] env[65503]: DEBUG oslo_vmware.api [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449840, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.276549] env[65503]: DEBUG nova.network.neutron [-] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 762.293587] env[65503]: WARNING openstack [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 762.294853] env[65503]: WARNING openstack [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 762.366000] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449839, 'name': ReconfigVM_Task, 'duration_secs': 0.498495} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.370867] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Reconfigured VM instance instance-0000002b to attach disk [datastore2] f8d61ded-ddf7-4ec9-88e7-92ffb6934733/f8d61ded-ddf7-4ec9-88e7-92ffb6934733.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 762.374427] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-970d9232-9cf2-4924-8e2d-019938c491b5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.390396] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52908aed-b3b6-9e45-2c99-2662d5a6e5e8, 'name': SearchDatastore_Task, 'duration_secs': 0.01657} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.395411] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 762.395411] env[65503]: value = "task-4449841" [ 762.395411] env[65503]: _type = "Task" [ 762.395411] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.396306] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38dbbd6f-3dd4-427e-ae2b-71f5fb7926a3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.405991] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449837, 'name': CreateVM_Task, 'duration_secs': 0.709759} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.406607] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 762.408331] env[65503]: WARNING neutronclient.v2_0.client [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 762.408331] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.408331] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.408331] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 762.408764] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-158d7ab5-a875-42d5-bd7d-95eb28d4ffdd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.416416] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449841, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.416875] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for the task: (returnval){ [ 762.416875] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527e1b3c-bb38-ab83-57a0-ca6bb55da4b8" [ 762.416875] env[65503]: _type = "Task" [ 762.416875] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.425705] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 762.425705] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521d9f96-3210-f4cb-cc37-8799fb7e0005" [ 762.425705] env[65503]: _type = "Task" [ 762.425705] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.427218] env[65503]: WARNING neutronclient.v2_0.client [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 762.427892] env[65503]: WARNING openstack [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 762.428335] env[65503]: WARNING openstack [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 762.448320] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527e1b3c-bb38-ab83-57a0-ca6bb55da4b8, 'name': SearchDatastore_Task, 'duration_secs': 0.020158} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.450066] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.450443] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 5cefb589-9947-4fc1-89b4-d888f8c8f644/5cefb589-9947-4fc1-89b4-d888f8c8f644.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 762.450799] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e9f6d2c-5697-43df-bdbf-0f87e36040e2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.458490] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521d9f96-3210-f4cb-cc37-8799fb7e0005, 'name': SearchDatastore_Task, 'duration_secs': 0.030442} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.459152] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.459406] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 762.459732] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.460272] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.460272] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 762.460563] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c8acfed-54df-4068-b647-895970a8fc47 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.466739] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for the task: (returnval){ [ 762.466739] env[65503]: value = "task-4449842" [ 762.466739] env[65503]: _type = "Task" [ 762.466739] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.477790] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449842, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.479169] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 762.479351] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 762.480172] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ea0d250-a73a-45a1-aa8b-dcc8bf9d7067 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.487647] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 762.487647] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52dd5999-9da4-d226-6fd6-e1fe413e345f" [ 762.487647] env[65503]: _type = "Task" [ 762.487647] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.500245] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52dd5999-9da4-d226-6fd6-e1fe413e345f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.559318] env[65503]: DEBUG nova.network.neutron [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Updating instance_info_cache with network_info: [{"id": "49fc5f6e-b681-4971-88d0-255d42df1686", "address": "fa:16:3e:d8:4a:5d", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.25", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49fc5f6e-b6", "ovs_interfaceid": "49fc5f6e-b681-4971-88d0-255d42df1686", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 762.646825] env[65503]: DEBUG oslo_vmware.api [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449838, 'name': PowerOnVM_Task, 'duration_secs': 0.696705} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.647252] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 762.647526] env[65503]: INFO nova.compute.manager [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Took 9.71 seconds to spawn the instance on the hypervisor. 
[ 762.647779] env[65503]: DEBUG nova.compute.manager [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 762.649227] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99cc8b2-5203-45c5-b36b-8610fb463a81 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.710535] env[65503]: DEBUG nova.compute.manager [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Received event network-vif-plugged-d53b3aea-2563-4099-a577-bb623eb1e83e {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 762.710751] env[65503]: DEBUG oslo_concurrency.lockutils [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Acquiring lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.710954] env[65503]: DEBUG oslo_concurrency.lockutils [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.711132] env[65503]: DEBUG oslo_concurrency.lockutils [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.711407] env[65503]: DEBUG nova.compute.manager [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] No waiting events found dispatching network-vif-plugged-d53b3aea-2563-4099-a577-bb623eb1e83e {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 762.711640] env[65503]: WARNING nova.compute.manager [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Received unexpected event network-vif-plugged-d53b3aea-2563-4099-a577-bb623eb1e83e for instance with vm_state building and task_state spawning. 
[ 762.711834] env[65503]: DEBUG nova.compute.manager [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Received event network-changed-d53b3aea-2563-4099-a577-bb623eb1e83e {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 762.711933] env[65503]: DEBUG nova.compute.manager [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Refreshing instance network info cache due to event network-changed-d53b3aea-2563-4099-a577-bb623eb1e83e. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 762.712132] env[65503]: DEBUG oslo_concurrency.lockutils [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Acquiring lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.712259] env[65503]: DEBUG oslo_concurrency.lockutils [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Acquired lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.712406] env[65503]: DEBUG nova.network.neutron [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Refreshing network info cache for port d53b3aea-2563-4099-a577-bb623eb1e83e {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 762.722144] env[65503]: DEBUG nova.compute.manager [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Received event network-changed-4fd37874-1199-4205-b01b-28c18959441c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 762.722478] env[65503]: DEBUG nova.compute.manager [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Refreshing instance network info cache due to event network-changed-4fd37874-1199-4205-b01b-28c18959441c. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 762.722546] env[65503]: DEBUG oslo_concurrency.lockutils [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] Acquiring lock "refresh_cache-5cefb589-9947-4fc1-89b4-d888f8c8f644" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.722695] env[65503]: DEBUG oslo_concurrency.lockutils [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] Acquired lock "refresh_cache-5cefb589-9947-4fc1-89b4-d888f8c8f644" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.722861] env[65503]: DEBUG nova.network.neutron [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Refreshing network info cache for port 4fd37874-1199-4205-b01b-28c18959441c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 762.748365] env[65503]: DEBUG oslo_vmware.api [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449840, 'name': PowerOffVM_Task, 'duration_secs': 0.269933} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.751664] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.751970] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 762.752428] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5346fa87-02da-4af5-a912-f508751a4308 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.780372] env[65503]: INFO nova.compute.manager [-] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Took 1.34 seconds to deallocate network for instance. 
[ 762.845374] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 762.845677] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 762.845937] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Deleting the datastore file [datastore1] 75578ccd-2b34-4948-9afa-ac94e9fd8b4b {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.846256] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b1412ec-e0f5-4444-b80f-5102fe35a894 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.856278] env[65503]: DEBUG oslo_vmware.api [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for the task: (returnval){ [ 762.856278] env[65503]: value = "task-4449844" [ 762.856278] env[65503]: _type = "Task" [ 762.856278] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.872346] env[65503]: DEBUG oslo_vmware.api [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449844, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.912499] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449841, 'name': Rename_Task, 'duration_secs': 0.24229} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.916169] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 762.916851] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28ae380a-0c92-4e3f-af33-3d137be6c018 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.924208] env[65503]: DEBUG nova.objects.instance [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lazy-loading 'flavor' on Instance uuid 0c0c6d3e-f4d2-458f-aa69-19f87a37f162 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 762.928795] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 762.928795] env[65503]: value = "task-4449845" [ 762.928795] env[65503]: _type = "Task" [ 762.928795] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.936274] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c2e782-43c5-4751-b4e2-495dfe827b5b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.949239] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449845, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.953391] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e522247e-ccc9-4f3c-9c22-bead2eda317e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.004951] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077d143d-86ee-428c-9720-cf4482536a6b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.016294] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449842, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.028955] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52dd5999-9da4-d226-6fd6-e1fe413e345f, 'name': SearchDatastore_Task, 'duration_secs': 0.02975} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.032998] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeab3475-e6ba-4a50-b1cd-19d882e14d22 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.037177] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea550075-176d-4661-999b-9c68a43a38be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.046427] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 763.046427] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52516878-db1f-0086-ed34-da57bfe611b5" [ 763.046427] env[65503]: _type = "Task" [ 763.046427] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.057926] env[65503]: DEBUG nova.compute.provider_tree [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.065849] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f23d234-81c4-4a17-abca-d5ac7cc9d22f tempest-ServerExternalEventsTest-801830376 tempest-ServerExternalEventsTest-801830376-project] Releasing lock "refresh_cache-ca5962fe-3e41-4fae-8860-90fa7278e0fc" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.079515] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52516878-db1f-0086-ed34-da57bfe611b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.120078] env[65503]: DEBUG oslo_concurrency.lockutils [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquiring lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.120431] env[65503]: DEBUG oslo_concurrency.lockutils [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.120673] env[65503]: DEBUG oslo_concurrency.lockutils [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquiring lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.120889] env[65503]: DEBUG oslo_concurrency.lockutils [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.121149] env[65503]: DEBUG oslo_concurrency.lockutils [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.123968] env[65503]: INFO nova.compute.manager [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Terminating instance [ 763.175127] env[65503]: INFO nova.compute.manager [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Took 19.49 seconds to build instance. [ 763.215460] env[65503]: WARNING neutronclient.v2_0.client [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 763.217862] env[65503]: WARNING openstack [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 763.217862] env[65503]: WARNING openstack [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 763.226135] env[65503]: WARNING neutronclient.v2_0.client [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 763.226787] env[65503]: WARNING openstack [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 763.227214] env[65503]: WARNING openstack [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 763.344943] env[65503]: INFO nova.compute.manager [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Took 0.56 seconds to detach 1 volumes for instance. [ 763.366797] env[65503]: DEBUG oslo_vmware.api [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Task: {'id': task-4449844, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.456158} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.367053] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.367237] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 763.367413] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.367584] env[65503]: INFO nova.compute.manager [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Took 1.15 seconds to destroy the instance on the hypervisor. [ 763.367823] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 763.368035] env[65503]: DEBUG nova.compute.manager [-] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 763.368135] env[65503]: DEBUG nova.network.neutron [-] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 763.368382] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 763.368951] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 763.369234] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 763.429888] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquiring lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.430093] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquired lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.430377] env[65503]: WARNING neutronclient.v2_0.client [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 763.431030] env[65503]: WARNING openstack [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 763.431381] env[65503]: WARNING openstack [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 763.448370] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449845, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.486852] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 763.506698] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449842, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678016} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.508122] env[65503]: WARNING openstack [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 763.508515] env[65503]: WARNING openstack [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 763.516391] env[65503]: WARNING openstack [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 763.516744] env[65503]: WARNING openstack [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 763.523414] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 5cefb589-9947-4fc1-89b4-d888f8c8f644/5cefb589-9947-4fc1-89b4-d888f8c8f644.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 763.523754] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 763.524842] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b8b6de6-1741-48e4-a22f-810befdb96d9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.533975] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for the task: (returnval){ [ 763.533975] env[65503]: value = "task-4449846" [ 763.533975] env[65503]: _type 
= "Task" [ 763.533975] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.550886] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449846, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.561764] env[65503]: DEBUG nova.scheduler.client.report [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 763.579458] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52516878-db1f-0086-ed34-da57bfe611b5, 'name': SearchDatastore_Task, 'duration_secs': 0.056385} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.579458] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.579700] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4/ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 763.581048] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5682480-72b0-4445-98d8-b50c7d7d6807 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.590083] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 763.590083] env[65503]: value = "task-4449847" [ 763.590083] env[65503]: _type = "Task" [ 763.590083] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.604739] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449847, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.630038] env[65503]: DEBUG nova.compute.manager [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 763.630333] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 763.631422] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d791d4b6-eb63-4eb0-9419-e8b1089b7867 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.644577] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 763.644995] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e32e15b-930d-402a-9393-ea3420529008 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.654209] env[65503]: DEBUG oslo_vmware.api [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for the task: (returnval){ [ 763.654209] env[65503]: value = "task-4449848" [ 763.654209] env[65503]: _type = "Task" [ 763.654209] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.665992] env[65503]: DEBUG oslo_vmware.api [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449848, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.677478] env[65503]: DEBUG oslo_concurrency.lockutils [None req-660a3cbf-bf3c-492c-89c2-26aecd4841d5 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.014s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.801452] env[65503]: INFO nova.compute.manager [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Rebuilding instance [ 763.852049] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.869738] env[65503]: DEBUG nova.compute.manager [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 763.871428] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8900edee-b693-4241-9e54-8958f03ad524 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.919143] env[65503]: WARNING neutronclient.v2_0.client [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 763.919354] env[65503]: WARNING openstack [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 763.919791] env[65503]: WARNING openstack [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 763.930863] env[65503]: WARNING neutronclient.v2_0.client [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 763.931578] env[65503]: WARNING openstack [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 763.932036] env[65503]: WARNING openstack [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 763.951092] env[65503]: DEBUG oslo_vmware.api [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449845, 'name': PowerOnVM_Task, 'duration_secs': 0.911881} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.954022] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 763.954022] env[65503]: INFO nova.compute.manager [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Took 13.82 seconds to spawn the instance on the hypervisor. [ 763.954022] env[65503]: DEBUG nova.compute.manager [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 763.954022] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba44118-d68a-4e42-bec0-a9287ee91848 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.055689] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449846, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075374} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.055689] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.055689] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0857fb25-c667-41f9-9ee1-1bf055ea1703 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.074827] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.830s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.076044] env[65503]: DEBUG nova.compute.manager [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 764.087015] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] 5cefb589-9947-4fc1-89b4-d888f8c8f644/5cefb589-9947-4fc1-89b4-d888f8c8f644.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.087712] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.240s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.089618] env[65503]: INFO nova.compute.claims [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 764.092646] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9841822b-a9a3-4637-9781-36cffaadd844 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.114023] env[65503]: DEBUG nova.network.neutron [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Updated VIF entry in instance network info cache for port d53b3aea-2563-4099-a577-bb623eb1e83e. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 764.114023] env[65503]: DEBUG nova.network.neutron [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Updating instance_info_cache with network_info: [{"id": "d53b3aea-2563-4099-a577-bb623eb1e83e", "address": "fa:16:3e:ea:8f:8c", "network": {"id": "9b0ccd37-b92a-4bb9-bd80-6ac034d3105c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2254fd86b74662975d3ad1fa4b0f74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd53b3aea-25", "ovs_interfaceid": "d53b3aea-2563-4099-a577-bb623eb1e83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 764.124089] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449847, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.126286] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for the task: (returnval){ [ 764.126286] env[65503]: value = "task-4449849" [ 764.126286] env[65503]: _type = "Task" [ 764.126286] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.136619] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449849, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.176690] env[65503]: DEBUG oslo_vmware.api [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449848, 'name': PowerOffVM_Task, 'duration_secs': 0.280821} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.177813] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 764.178373] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 764.179884] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb6f04b6-bead-49e6-bfcf-9f62184d14bd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.182777] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 764.247227] env[65503]: DEBUG nova.network.neutron [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Updated VIF entry in instance network info cache for port 4fd37874-1199-4205-b01b-28c18959441c. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 764.247227] env[65503]: DEBUG nova.network.neutron [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Updating instance_info_cache with network_info: [{"id": "4fd37874-1199-4205-b01b-28c18959441c", "address": "fa:16:3e:45:08:74", "network": {"id": "670fed0b-9502-4fc3-8a27-8458398be406", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1967452757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "bca4e8b44ecb4c24803c35c24892bdc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd37874-11", "ovs_interfaceid": "4fd37874-1199-4205-b01b-28c18959441c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 764.272474] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: 
ca5962fe-3e41-4fae-8860-90fa7278e0fc] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 764.272637] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 764.273265] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Deleting the datastore file [datastore2] ca5962fe-3e41-4fae-8860-90fa7278e0fc {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.274080] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63c9951f-917e-4924-8f2b-bc099ecab4ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.284562] env[65503]: DEBUG oslo_vmware.api [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for the task: (returnval){ [ 764.284562] env[65503]: value = "task-4449851" [ 764.284562] env[65503]: _type = "Task" [ 764.284562] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.301506] env[65503]: DEBUG nova.network.neutron [-] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 764.302247] env[65503]: DEBUG oslo_vmware.api [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449851, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.442648] env[65503]: DEBUG nova.network.neutron [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 764.480332] env[65503]: INFO nova.compute.manager [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Took 21.14 seconds to build instance. [ 764.590107] env[65503]: DEBUG nova.compute.utils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 764.592071] env[65503]: DEBUG nova.compute.manager [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 764.596209] env[65503]: DEBUG nova.network.neutron [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 764.596583] env[65503]: WARNING neutronclient.v2_0.client [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 764.596899] env[65503]: WARNING neutronclient.v2_0.client [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 764.599044] env[65503]: WARNING openstack [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 764.599044] env[65503]: WARNING openstack [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 764.608955] env[65503]: DEBUG nova.compute.manager [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 764.619036] env[65503]: DEBUG oslo_concurrency.lockutils [req-2c5edc25-f54c-41b7-9fd0-8f3f0c79286d req-7a82c223-c5e5-4399-8a39-f673ed0820bd service nova] Releasing lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.632772] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449847, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.814062} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.638275] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4/ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 764.638563] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.639508] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49b2ab81-ef29-4b9c-b80c-b7a8fcb7467b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.649729] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449849, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.651524] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 764.651524] env[65503]: value = "task-4449852" [ 764.651524] env[65503]: _type = "Task" [ 764.651524] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.662445] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449852, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.691856] env[65503]: DEBUG nova.policy [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31218e12a836406eb32ee65a6900ec32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5a588e741704449878e7a03d7892d11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 764.718041] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.750277] env[65503]: DEBUG oslo_concurrency.lockutils [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] Releasing lock "refresh_cache-5cefb589-9947-4fc1-89b4-d888f8c8f644" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.750557] env[65503]: DEBUG nova.compute.manager [req-3372846d-37f2-42d8-ac24-a3e871915365 req-74cad9e7-d8f0-40d5-92da-dfd148928d38 service nova] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Received event network-vif-deleted-1e1fc8a1-f7e8-49f4-b328-b7f029f59874 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 764.798044] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquiring lock "db942a2d-671b-4036-a80b-d2375145cd29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.798044] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Lock "db942a2d-671b-4036-a80b-d2375145cd29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.798044] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquiring lock "db942a2d-671b-4036-a80b-d2375145cd29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.798272] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 
tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Lock "db942a2d-671b-4036-a80b-d2375145cd29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.798272] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Lock "db942a2d-671b-4036-a80b-d2375145cd29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.800250] env[65503]: DEBUG oslo_vmware.api [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Task: {'id': task-4449851, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24306} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.801960] env[65503]: INFO nova.compute.manager [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Terminating instance [ 764.808049] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 764.808049] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 764.808049] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 764.808049] env[65503]: INFO nova.compute.manager [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Took 1.18 seconds to destroy the instance on the hypervisor. [ 764.808049] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 764.808049] env[65503]: INFO nova.compute.manager [-] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Took 1.44 seconds to deallocate network for instance. 
[ 764.809214] env[65503]: DEBUG nova.compute.manager [-] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 764.809352] env[65503]: DEBUG nova.network.neutron [-] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 764.809570] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 764.810168] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 764.810627] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 764.825324] env[65503]: DEBUG nova.compute.manager [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 764.825324] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.826284] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597f21e2-2231-433d-89cd-e5ddc11fef2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.842800] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 764.842800] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d71d0f2-c8a4-4e09-8476-e410c11d69a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.853548] env[65503]: DEBUG oslo_vmware.api [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for the task: (returnval){ [ 764.853548] env[65503]: value = "task-4449853" [ 764.853548] env[65503]: _type = "Task" [ 764.853548] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.871312] env[65503]: DEBUG oslo_vmware.api [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449853, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.875092] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 764.891939] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 764.891939] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0be09de4-a683-47f3-bd33-2f965a29e1fb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.900039] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 764.900039] env[65503]: value = "task-4449854" [ 764.900039] env[65503]: _type = "Task" [ 764.900039] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.913831] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449854, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.946726] env[65503]: WARNING neutronclient.v2_0.client [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 764.946726] env[65503]: WARNING openstack [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 764.946726] env[65503]: WARNING openstack [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 764.981239] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a91b63a1-9cd7-489e-9d4d-309250fe736b tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.663s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.057552] env[65503]: DEBUG nova.network.neutron [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Successfully created port: 73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 765.144856] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449849, 'name': ReconfigVM_Task, 'duration_secs': 0.608194} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.146991] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Reconfigured VM instance instance-0000002d to attach disk [datastore2] 5cefb589-9947-4fc1-89b4-d888f8c8f644/5cefb589-9947-4fc1-89b4-d888f8c8f644.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.146991] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09e71866-f554-496b-a773-cf8e32f85e44 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.164321] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for the task: (returnval){ [ 765.164321] env[65503]: value = "task-4449855" [ 765.164321] env[65503]: _type = "Task" [ 765.164321] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.173110] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449852, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074037} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.174022] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.174446] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2d7017-af0b-4be7-8e92-2ff2cb0f7bb9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.181057] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449855, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.210396] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4/ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 765.211137] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41398bfb-1edc-40ca-9148-d7738d2c7dc3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.237667] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 765.237667] env[65503]: value = "task-4449856" [ 765.237667] env[65503]: _type = "Task" [ 765.237667] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.249054] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449856, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.338225] env[65503]: DEBUG oslo_concurrency.lockutils [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.365039] env[65503]: DEBUG oslo_vmware.api [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449853, 'name': PowerOffVM_Task, 'duration_secs': 0.22814} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.366236] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 765.366236] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 765.366236] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d32fa32-707e-46a6-ad0d-04bcc6f3caf7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.413969] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449854, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.455960] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 765.456414] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 765.456706] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Deleting the datastore file [datastore2] db942a2d-671b-4036-a80b-d2375145cd29 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 765.457270] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78fbe295-cfb6-4773-9d36-0c75a9749755 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.465155] env[65503]: DEBUG oslo_vmware.api [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for the task: (returnval){ [ 765.465155] env[65503]: value = "task-4449858" [ 765.465155] env[65503]: _type = "Task" [ 765.465155] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.481953] env[65503]: DEBUG oslo_vmware.api [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449858, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.484549] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 765.517843] env[65503]: WARNING openstack [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 765.521721] env[65503]: WARNING openstack [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 765.618015] env[65503]: DEBUG nova.compute.manager [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 765.653357] env[65503]: DEBUG nova.virt.hardware [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 765.655945] env[65503]: DEBUG nova.virt.hardware [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 765.655945] env[65503]: DEBUG nova.virt.hardware [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 765.655945] env[65503]: DEBUG nova.virt.hardware [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 765.655945] env[65503]: DEBUG nova.virt.hardware [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea 
tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 765.655945] env[65503]: DEBUG nova.virt.hardware [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 765.655945] env[65503]: DEBUG nova.virt.hardware [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 765.655945] env[65503]: DEBUG nova.virt.hardware [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 765.655945] env[65503]: DEBUG nova.virt.hardware [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 765.655945] env[65503]: DEBUG nova.virt.hardware [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 765.655945] env[65503]: DEBUG nova.virt.hardware [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 765.661131] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00f663a-dec0-4609-b253-d0e9d27aa1fa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.676440] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053acec0-d3f5-4221-b4ef-41e77b51747b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.684054] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449855, 'name': Rename_Task, 'duration_secs': 0.325428} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.684823] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 765.685047] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4591e846-becd-4020-895c-8f100c67a737 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.705195] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for the task: (returnval){ [ 765.705195] env[65503]: value = "task-4449859" [ 765.705195] env[65503]: _type = "Task" [ 765.705195] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.715947] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449859, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.753381] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449856, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.789865] env[65503]: WARNING neutronclient.v2_0.client [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 765.790890] env[65503]: WARNING openstack [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 765.791786] env[65503]: WARNING openstack [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 765.800565] env[65503]: DEBUG nova.network.neutron [-] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 765.803813] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a749caff-0673-4998-a1ba-e62951d028c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.813164] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f13352-6af7-4f09-86be-62e7308ef331 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.849608] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f1c94e-0cc0-4265-8c2b-a6474d52f5c4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.863437] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbcca770-457c-44c8-bf3b-df9bb7e7bbd6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.884140] env[65503]: DEBUG nova.compute.provider_tree [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.915732] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449854, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.926835] env[65503]: DEBUG nova.compute.manager [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Received event network-vif-deleted-6410d13c-8f5f-4943-818b-69c48368b69e {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 765.926835] env[65503]: DEBUG nova.compute.manager [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Received event network-changed-7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 765.926835] env[65503]: DEBUG nova.compute.manager [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Refreshing instance network info cache due to event network-changed-7dcf2e2a-4e77-459c-9936-568c34f49a33. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 765.926835] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] Acquiring lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.936867] env[65503]: DEBUG nova.network.neutron [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updating instance_info_cache with network_info: [{"id": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "address": "fa:16:3e:4a:dc:36", "network": {"id": "d86429bf-deca-464e-a1ca-79eafee9ebd1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-267775540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf44cd2ee0e4906bcdc3d16dfe7c838", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcf2e2a-4e", "ovs_interfaceid": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 765.947911] env[65503]: DEBUG nova.compute.manager [req-8de22986-888c-42c7-92b5-c0496e72f340 req-bebcdef8-b3d2-42c7-9194-a152b0a0559b service nova] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Received event 
network-vif-deleted-7dd31600-1222-4005-ad0f-74f4f93fcd4b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 765.977493] env[65503]: DEBUG oslo_vmware.api [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Task: {'id': task-4449858, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138602} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.977835] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.978079] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.978332] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.978632] env[65503]: INFO nova.compute.manager [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Took 1.16 seconds to destroy the instance on the hypervisor. [ 765.978982] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 765.979240] env[65503]: DEBUG nova.compute.manager [-] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 765.979350] env[65503]: DEBUG nova.network.neutron [-] [instance: db942a2d-671b-4036-a80b-d2375145cd29] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 765.979641] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 765.980509] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 765.980923] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 766.010483] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.049161] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 766.218828] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449859, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.253247] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449856, 'name': ReconfigVM_Task, 'duration_secs': 0.930735} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.253629] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Reconfigured VM instance instance-0000002e to attach disk [datastore2] ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4/ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.254348] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5dabef38-8f6d-4d51-8932-9d42e62ffc85 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.262595] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 766.262595] env[65503]: value = "task-4449860" [ 766.262595] env[65503]: _type = "Task" [ 766.262595] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.272615] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449860, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.307445] env[65503]: INFO nova.compute.manager [-] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Took 1.50 seconds to deallocate network for instance. [ 766.388104] env[65503]: DEBUG nova.scheduler.client.report [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 766.418302] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449854, 'name': PowerOffVM_Task, 'duration_secs': 1.101733} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.418612] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 766.418795] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 766.419677] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f69c86-ca97-40d7-bc70-20b4f56794bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.429413] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 766.429715] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-978fcbf0-1618-41fa-ab0f-0f0fb9267e9f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.440062] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 
tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Releasing lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.440260] env[65503]: DEBUG nova.compute.manager [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Inject network info {{(pid=65503) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7777}} [ 766.440480] env[65503]: DEBUG nova.compute.manager [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] network_info to inject: |[{"id": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "address": "fa:16:3e:4a:dc:36", "network": {"id": "d86429bf-deca-464e-a1ca-79eafee9ebd1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-267775540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf44cd2ee0e4906bcdc3d16dfe7c838", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcf2e2a-4e", "ovs_interfaceid": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7778}} [ 766.447534] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Reconfiguring VM instance to set the machine id {{(pid=65503) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 766.447911] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] Acquired lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.448104] env[65503]: DEBUG nova.network.neutron [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Refreshing network info cache for port 7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 766.449602] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-f37b66f9-cf71-455f-a303-c12d930f0119 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.462180] env[65503]: WARNING neutronclient.v2_0.client [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 766.463045] env[65503]: WARNING openstack [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 766.463378] env[65503]: WARNING openstack [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 766.478836] env[65503]: DEBUG oslo_vmware.api [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 766.478836] env[65503]: value = "task-4449862" [ 766.478836] env[65503]: _type = "Task" [ 766.478836] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.489932] env[65503]: DEBUG oslo_vmware.api [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449862, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.505134] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 766.505523] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 766.505843] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleting the datastore file [datastore2] ad85eef0-cef7-4900-b193-1737a6c2f17b {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 766.506682] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d474d04-257a-463d-8e4c-85b051d26d40 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.515015] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 766.515015] env[65503]: value = "task-4449863" [ 766.515015] env[65503]: _type = "Task" [ 766.515015] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.526719] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449863, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.623479] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquiring lock "e74fe378-737a-4732-9a2d-b889a436b8a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.623937] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock "e74fe378-737a-4732-9a2d-b889a436b8a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.718293] env[65503]: DEBUG oslo_vmware.api [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449859, 'name': PowerOnVM_Task, 'duration_secs': 0.737324} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.721218] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 766.721442] env[65503]: INFO nova.compute.manager [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Took 11.03 seconds to spawn the instance on the hypervisor. [ 766.721626] env[65503]: DEBUG nova.compute.manager [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 766.723076] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8ac117-6b28-4b40-9b5b-d0f9c459757a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.779746] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449860, 'name': Rename_Task, 'duration_secs': 0.262891} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.780950] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 766.781240] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6287e450-748b-4202-9b54-88b99092576a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.792487] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 766.792487] env[65503]: value = "task-4449864" [ 766.792487] env[65503]: _type = "Task" [ 766.792487] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.804817] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449864, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.815270] env[65503]: DEBUG oslo_concurrency.lockutils [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.856161] env[65503]: WARNING openstack [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 766.856584] env[65503]: WARNING openstack [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 766.870628] env[65503]: DEBUG nova.network.neutron [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Successfully updated port: 73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 766.893652] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.806s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.894277] env[65503]: DEBUG nova.compute.manager [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 766.897852] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.784s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.899909] env[65503]: INFO nova.compute.claims [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.988054] env[65503]: WARNING neutronclient.v2_0.client [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 766.988733] env[65503]: WARNING openstack [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 766.989096] env[65503]: WARNING openstack [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 767.008988] env[65503]: DEBUG oslo_vmware.api [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449862, 'name': ReconfigVM_Task, 'duration_secs': 0.18273} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.008988] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-94a2cf86-b6c9-4931-906b-7f3ca6a453c8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Reconfigured VM instance to set the machine id {{(pid=65503) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 767.025822] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162854} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.026185] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 767.026396] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 767.026573] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 767.123405] env[65503]: DEBUG nova.network.neutron [-] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 767.131034] env[65503]: DEBUG nova.network.neutron [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updated VIF entry in instance network info cache for port 7dcf2e2a-4e77-459c-9936-568c34f49a33. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 767.131468] env[65503]: DEBUG nova.network.neutron [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updating instance_info_cache with network_info: [{"id": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "address": "fa:16:3e:4a:dc:36", "network": {"id": "d86429bf-deca-464e-a1ca-79eafee9ebd1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-267775540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf44cd2ee0e4906bcdc3d16dfe7c838", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcf2e2a-4e", "ovs_interfaceid": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 767.251145] env[65503]: INFO nova.compute.manager [None 
req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Took 22.84 seconds to build instance. [ 767.304376] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449864, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.375209] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.375209] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.375314] env[65503]: DEBUG nova.network.neutron [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 767.408329] env[65503]: DEBUG nova.compute.utils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 767.410495] env[65503]: DEBUG nova.compute.manager [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 767.410730] env[65503]: DEBUG nova.network.neutron [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 767.411190] env[65503]: WARNING neutronclient.v2_0.client [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 767.411405] env[65503]: WARNING neutronclient.v2_0.client [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 767.412104] env[65503]: WARNING openstack [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 767.412634] env[65503]: WARNING openstack [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 767.461476] env[65503]: DEBUG nova.policy [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa5203efa0354baca5354d76cf3365c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf54348a3d0948cfa816cc3746e86806', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 767.626052] env[65503]: INFO nova.compute.manager [-] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Took 1.65 seconds to deallocate network for instance. [ 767.636590] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfb1bb3-87b5-46a5-ae3a-95a98fb3a6c1 req-5698e949-3218-4643-9402-7c8100ced35e service nova] Releasing lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.756592] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ce365f-3344-4a97-a07e-c8da84f553a6 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Lock "5cefb589-9947-4fc1-89b4-d888f8c8f644" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.359s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.806523] env[65503]: DEBUG oslo_vmware.api [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449864, 'name': PowerOnVM_Task, 'duration_secs': 0.882158} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.806998] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 767.807407] env[65503]: INFO nova.compute.manager [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Took 9.20 seconds to spawn the instance on the hypervisor. [ 767.807720] env[65503]: DEBUG nova.compute.manager [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 767.808673] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f607f2f-03df-495c-80bd-463dd3ad8db4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.881021] env[65503]: WARNING openstack [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 767.881021] env[65503]: WARNING openstack [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 767.924729] env[65503]: DEBUG nova.compute.manager [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 767.941396] env[65503]: DEBUG nova.network.neutron [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 767.978907] env[65503]: WARNING openstack [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 767.981472] env[65503]: WARNING openstack [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 768.070253] env[65503]: DEBUG nova.virt.hardware [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 768.070253] env[65503]: DEBUG nova.virt.hardware [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 768.070253] env[65503]: DEBUG nova.virt.hardware [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 768.071667] env[65503]: DEBUG nova.virt.hardware [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 768.071667] env[65503]: DEBUG nova.virt.hardware [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 768.071667] env[65503]: DEBUG nova.virt.hardware [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 768.072167] env[65503]: DEBUG nova.virt.hardware [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 768.072444] env[65503]: DEBUG nova.virt.hardware [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 768.072758] env[65503]: DEBUG nova.virt.hardware [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 768.073885] env[65503]: DEBUG nova.virt.hardware [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 768.073885] env[65503]: DEBUG nova.virt.hardware [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 768.074752] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c87b876-d6f6-4b75-8028-3c35d2bc368d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.088538] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662dd562-154a-4418-a8a4-cf85bb1b26e2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.105197] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:c2:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaba65c3-6925-4c7f-83b6-17cd1a328e27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6164c166-4054-4e4a-93fb-6e84abe74f7d', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 768.112773] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 768.120733] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 768.121685] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0066ea9f-41c4-48c9-b64a-a51b95218db6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.141024] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.148359] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 768.148359] env[65503]: value = "task-4449865" [ 768.148359] env[65503]: _type = "Task" [ 768.148359] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.158904] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449865, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.263027] env[65503]: WARNING neutronclient.v2_0.client [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 768.263027] env[65503]: WARNING openstack [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 768.263027] env[65503]: WARNING openstack [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 768.273081] env[65503]: DEBUG nova.compute.manager [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 768.333061] env[65503]: INFO nova.compute.manager [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Took 23.30 seconds to build instance. 
[ 768.365858] env[65503]: DEBUG nova.network.neutron [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Successfully created port: 6e3dc22f-5c81-48e2-9afd-c567f7010a9b {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 768.423953] env[65503]: DEBUG nova.network.neutron [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updating instance_info_cache with network_info: [{"id": "73e98445-c951-4dc2-82e3-537e2196f82a", "address": "fa:16:3e:d2:11:71", "network": {"id": "1ed26744-3e1a-4978-ae1a-76469b646e4a", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472231669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5a588e741704449878e7a03d7892d11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73e98445-c9", "ovs_interfaceid": "73e98445-c951-4dc2-82e3-537e2196f82a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 768.579468] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4d824e-5263-4748-abd1-56f216ae1e12 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.589543] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb2e7f5-becc-4f7e-a40d-00c09af594f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.627834] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0663b0c1-9c36-4fbd-a99a-70f8601425a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.637683] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281999a9-e905-4f1a-9f15-7438c46ec8e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.655625] env[65503]: DEBUG nova.compute.provider_tree [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.667085] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449865, 'name': CreateVM_Task} 
progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.708908] env[65503]: DEBUG nova.objects.instance [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lazy-loading 'flavor' on Instance uuid 0c0c6d3e-f4d2-458f-aa69-19f87a37f162 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 768.749414] env[65503]: DEBUG nova.compute.manager [req-9227d5b7-03e3-4233-bbfa-c4d0817dc6a5 req-429d323d-ff34-41b7-b480-6676ddd4bb87 service nova] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Received event network-vif-deleted-49fc5f6e-b681-4971-88d0-255d42df1686 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 768.749860] env[65503]: DEBUG nova.compute.manager [req-9227d5b7-03e3-4233-bbfa-c4d0817dc6a5 req-429d323d-ff34-41b7-b480-6676ddd4bb87 service nova] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Received event network-vif-deleted-74b1a7c2-cf5b-4c1a-80f0-caa7f80b8059 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 768.772162] env[65503]: DEBUG nova.compute.manager [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Received event network-vif-plugged-73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 768.772737] env[65503]: DEBUG oslo_concurrency.lockutils [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Acquiring lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.772737] env[65503]: DEBUG oslo_concurrency.lockutils [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.772923] env[65503]: DEBUG oslo_concurrency.lockutils [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.773117] env[65503]: DEBUG nova.compute.manager [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] No waiting events found dispatching network-vif-plugged-73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 768.773288] env[65503]: WARNING nova.compute.manager [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Received unexpected event network-vif-plugged-73e98445-c951-4dc2-82e3-537e2196f82a for instance with vm_state building and task_state spawning. 
[ 768.773442] env[65503]: DEBUG nova.compute.manager [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Received event network-changed-73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 768.773592] env[65503]: DEBUG nova.compute.manager [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Refreshing instance network info cache due to event network-changed-73e98445-c951-4dc2-82e3-537e2196f82a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 768.773755] env[65503]: DEBUG oslo_concurrency.lockutils [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Acquiring lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.793355] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.835154] env[65503]: DEBUG oslo_concurrency.lockutils [None req-42c6bab1-2ef2-4ea8-b7f1-f606cb72e4f0 tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.817s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.877165] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.877165] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.877165] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.877380] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock 
"f8d61ded-ddf7-4ec9-88e7-92ffb6934733-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.877380] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.879678] env[65503]: INFO nova.compute.manager [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Terminating instance [ 768.933494] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.933929] env[65503]: DEBUG nova.compute.manager [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Instance network_info: |[{"id": "73e98445-c951-4dc2-82e3-537e2196f82a", "address": "fa:16:3e:d2:11:71", "network": {"id": "1ed26744-3e1a-4978-ae1a-76469b646e4a", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472231669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5a588e741704449878e7a03d7892d11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73e98445-c9", "ovs_interfaceid": "73e98445-c951-4dc2-82e3-537e2196f82a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 768.934968] env[65503]: DEBUG oslo_concurrency.lockutils [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Acquired lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.934968] env[65503]: DEBUG nova.network.neutron [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Refreshing network info cache for port 73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2093}} [ 768.938364] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:11:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2180b40f-2bb0-47da-ba80-c2fbe7f98af0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73e98445-c951-4dc2-82e3-537e2196f82a', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 768.949334] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Creating folder: Project (f5a588e741704449878e7a03d7892d11). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 768.951157] env[65503]: WARNING neutronclient.v2_0.client [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 768.952105] env[65503]: WARNING openstack [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 768.952722] env[65503]: WARNING openstack [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 768.961509] env[65503]: DEBUG nova.compute.manager [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 768.963626] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-385148b2-1a9c-46fa-8b27-61aa131d6680 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.981176] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Created folder: Project (f5a588e741704449878e7a03d7892d11) in parent group-v870190. [ 768.981176] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Creating folder: Instances. Parent ref: group-v870330. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 768.981368] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6371154d-2c57-4a97-bbf3-9b65093757d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.994797] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Created folder: Instances in parent group-v870330. [ 768.995152] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 768.995423] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 768.995663] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ad65f83-404c-408d-974e-7da12a14348d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.014654] env[65503]: DEBUG nova.virt.hardware [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 769.015681] env[65503]: DEBUG nova.virt.hardware [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 769.015681] env[65503]: DEBUG nova.virt.hardware [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 769.015681] env[65503]: DEBUG nova.virt.hardware [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 769.015681] env[65503]: DEBUG nova.virt.hardware [None 
req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 769.015681] env[65503]: DEBUG nova.virt.hardware [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 769.016189] env[65503]: DEBUG nova.virt.hardware [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 769.016189] env[65503]: DEBUG nova.virt.hardware [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 769.016189] env[65503]: DEBUG nova.virt.hardware [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 769.016189] env[65503]: DEBUG nova.virt.hardware [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 769.016352] env[65503]: DEBUG nova.virt.hardware [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 769.017659] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c906650b-d594-4182-ae19-85a5a1fd3cc3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.029214] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7898c8b2-a679-4063-b922-be34050255db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.033739] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.033739] env[65503]: value = "task-4449868" [ 769.033739] env[65503]: _type = "Task" [ 769.033739] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.052786] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449868, 'name': CreateVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.162738] env[65503]: DEBUG nova.scheduler.client.report [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 769.178105] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449865, 'name': CreateVM_Task, 'duration_secs': 0.630157} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.179034] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 769.179204] env[65503]: WARNING neutronclient.v2_0.client [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 769.180026] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.180498] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.180619] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 769.181084] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-960cb8d9-37e2-4ced-aaee-1827c3749abd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.189415] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 769.189415] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]522b2941-30b7-a096-2e55-21fc7e1028b0" [ 769.189415] 
env[65503]: _type = "Task" [ 769.189415] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.208422] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522b2941-30b7-a096-2e55-21fc7e1028b0, 'name': SearchDatastore_Task, 'duration_secs': 0.012049} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.208955] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.209377] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 769.209719] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.210037] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.210311] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 769.210732] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6529c872-f209-4c2d-8a74-783cec4b8955 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.217659] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquiring lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.217910] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquired lock 
"refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.218392] env[65503]: WARNING neutronclient.v2_0.client [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 769.231478] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 769.231802] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 769.233122] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc003f5a-5915-4b00-aa32-182099c55804 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.243736] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 769.243736] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cebb26-3859-0573-eb5a-82a1a91b2efb" [ 769.243736] env[65503]: _type = "Task" [ 769.243736] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.260030] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cebb26-3859-0573-eb5a-82a1a91b2efb, 'name': SearchDatastore_Task, 'duration_secs': 0.011687} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.261743] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de932b84-9835-4519-adde-36f62c089d85 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.270898] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 769.270898] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e0caf1-e61e-a137-22a0-36674b8723e0" [ 769.270898] env[65503]: _type = "Task" [ 769.270898] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.281830] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e0caf1-e61e-a137-22a0-36674b8723e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.383991] env[65503]: DEBUG nova.compute.manager [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 769.384312] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 769.385326] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3d724c-0e7e-4efb-97a7-59244b197f44 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.396102] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 769.396417] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cfaebe2d-ea7c-4754-9a80-3defd4f50563 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.406180] env[65503]: DEBUG oslo_vmware.api [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 769.406180] env[65503]: value = "task-4449869" [ 769.406180] env[65503]: _type = "Task" [ 769.406180] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.420445] env[65503]: DEBUG oslo_vmware.api [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449869, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.459494] env[65503]: WARNING openstack [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 769.460028] env[65503]: WARNING openstack [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 769.547745] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449868, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.672069] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.774s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.672651] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 769.675602] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.442s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.678052] env[65503]: INFO nova.compute.claims [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.793688] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e0caf1-e61e-a137-22a0-36674b8723e0, 'name': SearchDatastore_Task, 'duration_secs': 0.011194} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.793978] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.794456] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 769.794598] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6bd8d07-f5e5-47d0-92a3-eba7b8e0dc38 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.804569] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 769.804569] env[65503]: value = "task-4449870" [ 769.804569] env[65503]: _type = "Task" [ 769.804569] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.815689] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449870, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.841722] env[65503]: DEBUG nova.network.neutron [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 769.920555] env[65503]: DEBUG oslo_vmware.api [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449869, 'name': PowerOffVM_Task, 'duration_secs': 0.27986} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.920838] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 769.921011] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 769.921294] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6da406ea-1b34-49d8-a104-55ae62e9e684 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.968767] env[65503]: DEBUG nova.network.neutron [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Successfully updated port: 6e3dc22f-5c81-48e2-9afd-c567f7010a9b {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 769.971897] env[65503]: WARNING neutronclient.v2_0.client [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 769.975019] env[65503]: WARNING openstack [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 769.975019] env[65503]: WARNING openstack [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 770.049871] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449868, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.052305] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 770.052402] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 770.052530] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Deleting the datastore file [datastore2] f8d61ded-ddf7-4ec9-88e7-92ffb6934733 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 770.052843] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7f4809d-c544-432c-a375-ad18b0db0799 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.061512] env[65503]: DEBUG oslo_vmware.api [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for the task: (returnval){ [ 770.061512] env[65503]: value = "task-4449872" [ 770.061512] env[65503]: _type = "Task" [ 770.061512] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.071117] env[65503]: DEBUG oslo_vmware.api [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449872, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.074279] env[65503]: DEBUG nova.network.neutron [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updated VIF entry in instance network info cache for port 73e98445-c951-4dc2-82e3-537e2196f82a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 770.074734] env[65503]: DEBUG nova.network.neutron [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updating instance_info_cache with network_info: [{"id": "73e98445-c951-4dc2-82e3-537e2196f82a", "address": "fa:16:3e:d2:11:71", "network": {"id": "1ed26744-3e1a-4978-ae1a-76469b646e4a", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472231669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5a588e741704449878e7a03d7892d11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73e98445-c9", "ovs_interfaceid": "73e98445-c951-4dc2-82e3-537e2196f82a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 770.183912] env[65503]: DEBUG nova.compute.utils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 770.189188] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 770.189188] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 770.189188] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 770.189188] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 770.189469] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 770.189911] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 770.276026] env[65503]: DEBUG nova.policy [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7f57b27af8e943a5b9d7f49193a01c44', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0823ed6443dd4d85937ab2f08f476c8d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 770.319973] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449870, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.344717] env[65503]: WARNING neutronclient.v2_0.client [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 770.345527] env[65503]: WARNING openstack [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 770.345922] env[65503]: WARNING openstack [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 770.483876] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "refresh_cache-a22f589e-7c40-4023-9a4c-9ab2a76faa94" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.484187] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "refresh_cache-a22f589e-7c40-4023-9a4c-9ab2a76faa94" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.484407] env[65503]: DEBUG nova.network.neutron [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 770.549884] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449868, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.573024] env[65503]: WARNING openstack [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 770.573657] env[65503]: WARNING openstack [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 770.582128] env[65503]: DEBUG oslo_concurrency.lockutils [req-32bc6226-d48e-4582-b0d4-9238dc10baf3 req-3fbf6e3b-c62a-4a84-980e-fca07c8082a9 service nova] Releasing lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.597479] env[65503]: DEBUG oslo_vmware.api [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Task: {'id': task-4449872, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175444} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.598210] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 770.598210] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 770.598342] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 770.598525] env[65503]: INFO nova.compute.manager [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Took 1.21 seconds to destroy the instance on the hypervisor. [ 770.598860] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 770.599161] env[65503]: DEBUG nova.compute.manager [-] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 770.599273] env[65503]: DEBUG nova.network.neutron [-] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 770.599620] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 770.600313] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 770.600639] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 770.688425] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 770.693470] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Successfully created port: f1edfbc1-d9c4-460c-90ae-43d46d3fe56a {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 770.741024] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 770.774144] env[65503]: WARNING neutronclient.v2_0.client [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 770.774854] env[65503]: WARNING openstack [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 770.775295] env[65503]: WARNING openstack [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 770.818780] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449870, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.990971} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.821871] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 770.822408] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 770.823083] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f1b142e-0616-4e51-8e32-02e7f32ecb48 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.834026] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 770.834026] env[65503]: value = "task-4449873" [ 770.834026] env[65503]: _type = "Task" [ 770.834026] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.846517] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449873, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.990136] env[65503]: WARNING openstack [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 770.990773] env[65503]: WARNING openstack [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 771.049062] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449868, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.304747] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2ac24b-5a0d-43a6-aa3c-4aca842bffc9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.314273] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319a623c-e6f1-4e18-bdca-8296e90c4360 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.350540] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4c449f-696c-48f4-a87c-2d1194026f58 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.361600] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa7f9b6-2689-431b-9656-1ac043205544 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.365632] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449873, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.260541} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.365938] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 771.367068] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce40ee24-4784-4ba1-931e-0eceb491ab17 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.379331] env[65503]: DEBUG nova.compute.provider_tree [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.401116] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 771.403198] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c2ab1dd-b51e-49f3-8b7c-0f1e75e6dc99 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.428044] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 771.428044] env[65503]: value = "task-4449874" [ 771.428044] env[65503]: _type = "Task" [ 771.428044] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.443256] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449874, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.548224] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449868, 'name': CreateVM_Task, 'duration_secs': 2.06173} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.549357] env[65503]: DEBUG nova.network.neutron [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 771.551607] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 771.552706] env[65503]: WARNING neutronclient.v2_0.client [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 771.553099] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.553259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.553557] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 771.553831] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d50c648-96e1-4d89-8dd4-cf99e72eba6f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.559737] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 771.559737] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52affe15-340c-59e0-c6de-2d7c37ea7684" [ 771.559737] env[65503]: _type = "Task" [ 771.559737] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.571658] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52affe15-340c-59e0-c6de-2d7c37ea7684, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.719494] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 771.749365] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 771.749643] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 771.749867] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 771.749996] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 771.750111] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 771.750255] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 771.750617] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 771.750844] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 771.751033] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 771.751202] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 771.751419] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 771.752574] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3eff8d2-cbed-4a62-b75e-eff4e463fe12 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.762519] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc1cceb-7a55-44a1-bc61-6c2ffc7c1fd1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.851281] env[65503]: DEBUG nova.network.neutron [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updating instance_info_cache with network_info: [{"id": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "address": "fa:16:3e:4a:dc:36", "network": {"id": "d86429bf-deca-464e-a1ca-79eafee9ebd1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-267775540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf44cd2ee0e4906bcdc3d16dfe7c838", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcf2e2a-4e", "ovs_interfaceid": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 771.882407] env[65503]: DEBUG nova.scheduler.client.report [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Inventory has not changed 
for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 771.892551] env[65503]: WARNING openstack [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 771.894211] env[65503]: WARNING openstack [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 771.938133] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449874, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.020449] env[65503]: WARNING neutronclient.v2_0.client [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 772.021151] env[65503]: WARNING openstack [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 772.021516] env[65503]: WARNING openstack [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 772.073398] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52affe15-340c-59e0-c6de-2d7c37ea7684, 'name': SearchDatastore_Task, 'duration_secs': 0.011578} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.073929] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.073929] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 772.074547] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.074547] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.074835] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 772.077044] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4abdb0ce-a970-4461-94a3-4615ac8c6118 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.087022] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 772.087369] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 772.088304] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea8f0503-8aca-4971-a338-152010087fc5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.096314] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 772.096314] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52efbec9-ca6a-6a62-fb67-baaeb8c4e75f" [ 772.096314] env[65503]: _type = "Task" [ 772.096314] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.111054] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52efbec9-ca6a-6a62-fb67-baaeb8c4e75f, 'name': SearchDatastore_Task, 'duration_secs': 0.01049} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.112104] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83d1d344-0183-4c40-b1ed-df8a46c6534b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.119502] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 772.119502] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526eff01-cfe5-32ce-4e27-0c4eda559f11" [ 772.119502] env[65503]: _type = "Task" [ 772.119502] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.130231] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526eff01-cfe5-32ce-4e27-0c4eda559f11, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.230587] env[65503]: DEBUG nova.network.neutron [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Updating instance_info_cache with network_info: [{"id": "6e3dc22f-5c81-48e2-9afd-c567f7010a9b", "address": "fa:16:3e:2f:24:0b", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3dc22f-5c", "ovs_interfaceid": "6e3dc22f-5c81-48e2-9afd-c567f7010a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 772.315506] env[65503]: DEBUG nova.network.neutron [-] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 772.356950] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Releasing lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.356950] env[65503]: DEBUG nova.compute.manager [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Inject network info {{(pid=65503) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7777}} [ 772.356950] env[65503]: DEBUG nova.compute.manager [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] network_info to inject: |[{"id": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "address": "fa:16:3e:4a:dc:36", "network": {"id": "d86429bf-deca-464e-a1ca-79eafee9ebd1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-267775540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf44cd2ee0e4906bcdc3d16dfe7c838", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcf2e2a-4e", "ovs_interfaceid": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7778}} [ 772.360984] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Reconfiguring VM instance to set the machine id {{(pid=65503) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 772.361381] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70778c9d-9912-468d-85ec-0ab10107ce90 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.383263] env[65503]: DEBUG oslo_vmware.api [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 772.383263] env[65503]: value = "task-4449875" [ 772.383263] env[65503]: _type = "Task" [ 772.383263] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.391936] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.392616] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 772.399875] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.555s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.402049] env[65503]: INFO nova.compute.claims [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 772.404834] env[65503]: DEBUG oslo_vmware.api [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449875, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.412494] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Successfully updated port: f1edfbc1-d9c4-460c-90ae-43d46d3fe56a {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 772.441050] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449874, 'name': ReconfigVM_Task, 'duration_secs': 0.910218} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.441350] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Reconfigured VM instance instance-00000005 to attach disk [datastore1] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 772.443306] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63d7fd74-afb8-4356-8644-193205e8dfed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.451059] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 772.451059] env[65503]: value = "task-4449876" [ 772.451059] env[65503]: _type = "Task" [ 772.451059] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.464629] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449876, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.635888] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526eff01-cfe5-32ce-4e27-0c4eda559f11, 'name': SearchDatastore_Task, 'duration_secs': 0.010238} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.636210] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.636470] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] ff256d3f-af88-4f01-bdfd-cf89e06ab364/ff256d3f-af88-4f01-bdfd-cf89e06ab364.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 772.636750] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8538f3e-3c61-4138-8511-ab8bf7a0155a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.645226] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 772.645226] env[65503]: value = "task-4449877" [ 772.645226] env[65503]: _type = "Task" [ 772.645226] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.655772] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449877, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.737242] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "refresh_cache-a22f589e-7c40-4023-9a4c-9ab2a76faa94" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.739937] env[65503]: DEBUG nova.compute.manager [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Instance network_info: |[{"id": "6e3dc22f-5c81-48e2-9afd-c567f7010a9b", "address": "fa:16:3e:2f:24:0b", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3dc22f-5c", "ovs_interfaceid": "6e3dc22f-5c81-48e2-9afd-c567f7010a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 772.739937] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:24:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e3dc22f-5c81-48e2-9afd-c567f7010a9b', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.747213] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 772.747213] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 772.747368] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4f0329c-5b7d-4876-bb6a-8e9f0f5aab5f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.769667] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.769667] env[65503]: value = "task-4449878" [ 772.769667] env[65503]: _type = "Task" [ 772.769667] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.779904] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449878, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.822290] env[65503]: INFO nova.compute.manager [-] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Took 2.22 seconds to deallocate network for instance. [ 772.897073] env[65503]: DEBUG oslo_vmware.api [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449875, 'name': ReconfigVM_Task, 'duration_secs': 0.173412} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.897073] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc72ae5b-e15b-4651-b033-1f6cea22c5f8 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Reconfigured VM instance to set the machine id {{(pid=65503) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 772.910307] env[65503]: DEBUG nova.compute.utils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 772.911494] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 772.911741] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 772.912140] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
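The "Acquiring lock" / "acquired by" / "released by" DEBUG entries above and below come from oslo.concurrency's lockutils module. A minimal sketch of the two usage forms behind those messages, assuming a hypothetical refresh helper and reusing a lock name that appears in the log:

    from oslo_concurrency import lockutils

    # Context-manager form, roughly as used around network-info cache
    # refreshes; the lock name mirrors the "refresh_cache-<instance uuid>"
    # names in the log.
    with lockutils.lock('refresh_cache-1bda7a65-0231-4753-9762-43e9b13bd893'):
        rebuild_network_info_cache()  # hypothetical helper, not a Nova function

    # Decorator form, roughly as used for resource-tracker claims on the
    # "compute_resources" lock seen in the instance_claim entries.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        ...  # claim CPU/RAM/disk for the instance being built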
[ 772.914315] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 772.914315] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 772.914315] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 772.921951] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "refresh_cache-1bda7a65-0231-4753-9762-43e9b13bd893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.922165] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired lock "refresh_cache-1bda7a65-0231-4753-9762-43e9b13bd893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.922329] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 772.964034] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449876, 'name': Rename_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.966215] env[65503]: DEBUG nova.compute.manager [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Received event network-vif-plugged-6e3dc22f-5c81-48e2-9afd-c567f7010a9b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 772.967180] env[65503]: DEBUG oslo_concurrency.lockutils [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Acquiring lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.967180] env[65503]: DEBUG oslo_concurrency.lockutils [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.967180] env[65503]: DEBUG oslo_concurrency.lockutils [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.967180] env[65503]: DEBUG nova.compute.manager [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] No waiting events found dispatching network-vif-plugged-6e3dc22f-5c81-48e2-9afd-c567f7010a9b {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 772.967180] env[65503]: WARNING nova.compute.manager [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Received unexpected event network-vif-plugged-6e3dc22f-5c81-48e2-9afd-c567f7010a9b for instance with vm_state building and task_state spawning. [ 772.967475] env[65503]: DEBUG nova.compute.manager [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Received event network-changed-6e3dc22f-5c81-48e2-9afd-c567f7010a9b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 772.967475] env[65503]: DEBUG nova.compute.manager [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Refreshing instance network info cache due to event network-changed-6e3dc22f-5c81-48e2-9afd-c567f7010a9b. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 772.967645] env[65503]: DEBUG oslo_concurrency.lockutils [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Acquiring lock "refresh_cache-a22f589e-7c40-4023-9a4c-9ab2a76faa94" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.967859] env[65503]: DEBUG oslo_concurrency.lockutils [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Acquired lock "refresh_cache-a22f589e-7c40-4023-9a4c-9ab2a76faa94" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.968126] env[65503]: DEBUG nova.network.neutron [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Refreshing network info cache for port 6e3dc22f-5c81-48e2-9afd-c567f7010a9b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 772.989271] env[65503]: DEBUG nova.compute.manager [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Received event network-changed-7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 772.989433] env[65503]: DEBUG nova.compute.manager [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Refreshing instance network info cache due to event network-changed-7dcf2e2a-4e77-459c-9936-568c34f49a33. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 772.989671] env[65503]: DEBUG oslo_concurrency.lockutils [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] Acquiring lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.989813] env[65503]: DEBUG oslo_concurrency.lockutils [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] Acquired lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.989973] env[65503]: DEBUG nova.network.neutron [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Refreshing network info cache for port 7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 773.054342] env[65503]: DEBUG nova.policy [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7f57b27af8e943a5b9d7f49193a01c44', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0823ed6443dd4d85937ab2f08f476c8d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} 
{{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 773.159198] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449877, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.282044] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449878, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.309754] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquiring lock "5cefb589-9947-4fc1-89b4-d888f8c8f644" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.310085] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Lock "5cefb589-9947-4fc1-89b4-d888f8c8f644" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.310266] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquiring lock "5cefb589-9947-4fc1-89b4-d888f8c8f644-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.310528] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Lock "5cefb589-9947-4fc1-89b4-d888f8c8f644-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.310627] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Lock "5cefb589-9947-4fc1-89b4-d888f8c8f644-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.313108] env[65503]: INFO nova.compute.manager [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Terminating instance [ 773.329458] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.422354] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 773.430685] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 773.430685] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 773.462429] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449876, 'name': Rename_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.471402] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Successfully created port: 4fb9999c-b567-4158-9058-b0c36232af7c {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 773.474110] env[65503]: WARNING neutronclient.v2_0.client [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
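The "Waiting for the task", "progress is N%", and "completed successfully" entries throughout this stretch are oslo.vmware's task polling (wait_for_task in oslo_vmware/api.py). A minimal stand-alone sketch of that pattern, with placeholder vCenter credentials and a made-up VM moref value; the power-off call mirrors the PowerOffVM_Task that appears later in the log:

    from oslo_vmware import api, vim_util

    # Placeholder connection details; the real values come from nova.conf.
    session = api.VMwareAPISession(
        'vcenter.example.test', 'svc-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for some VM (the id here is made up).
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

    # Asynchronous vSphere calls return a Task moref immediately...
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # ...and wait_for_task() polls it every task_poll_interval seconds until
    # it succeeds or raises -- that polling produces the progress lines above.
    session.wait_for_task(task)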
[ 773.474752] env[65503]: WARNING openstack [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 773.475233] env[65503]: WARNING openstack [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 773.497060] env[65503]: WARNING neutronclient.v2_0.client [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 773.497060] env[65503]: WARNING openstack [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 773.497608] env[65503]: WARNING openstack [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 773.557249] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 773.662051] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449877, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55014} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.662705] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] ff256d3f-af88-4f01-bdfd-cf89e06ab364/ff256d3f-af88-4f01-bdfd-cf89e06ab364.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 773.662705] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 773.665079] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a273f9fd-3cf1-474b-83ae-8e9425da7e41 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.674249] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 773.674249] env[65503]: value = "task-4449879" [ 773.674249] env[65503]: _type = "Task" [ 773.674249] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.688446] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449879, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.783122] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449878, 'name': CreateVM_Task, 'duration_secs': 0.630153} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.783343] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.783865] env[65503]: WARNING neutronclient.v2_0.client [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
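The CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above correspond to copying the cached image VMDK out of devstack-image-cache_base into the instance directory and then growing the root disk to the flavor size (1048576 KB, i.e. the 1 GB root disk of m1.nano). A rough sketch of those two calls via oslo.vmware, reusing the session object from the previous sketch and assuming dc_ref is the datacenter moref; the datastore paths are placeholders:

    disk_mgr = session.vim.service_content.virtualDiskManager

    # Copy the cached image disk into the new instance's folder.
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk',
        destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    # Grow the copied sparse disk to the requested root size, in KB.
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name='[datastore1] <instance-uuid>/<instance-uuid>.vmdk',
        datacenter=dc_ref,
        newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)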
[ 773.784361] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.784725] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.784802] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 773.785089] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5133b4d9-e412-477d-97f8-b52fa7f22fea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.797170] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 773.797170] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526bfc35-b32e-2585-fa1d-6b17160e7bb9" [ 773.797170] env[65503]: _type = "Task" [ 773.797170] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.805832] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526bfc35-b32e-2585-fa1d-6b17160e7bb9, 'name': SearchDatastore_Task, 'duration_secs': 0.011413} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.809206] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.809480] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 773.809722] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.809865] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.810313] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 773.811027] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4528340-2515-4dc0-80b1-b71422e87caf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.817190] env[65503]: DEBUG nova.compute.manager [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 773.817272] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 773.818167] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe91e76-7d8a-4975-b3ac-938bb8e5735c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.822781] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 773.823026] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 773.826642] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0894449-b2dd-4502-9cd5-fff4b86e4e04 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.838174] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 773.838174] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5aebf6b-d2ae-476b-b068-4b50300aa9c2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.842095] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 773.842095] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f7222e-2558-671d-a652-da1b7e8da383" [ 773.842095] env[65503]: _type = "Task" [ 773.842095] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.847977] env[65503]: DEBUG oslo_vmware.api [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for the task: (returnval){ [ 773.847977] env[65503]: value = "task-4449880" [ 773.847977] env[65503]: _type = "Task" [ 773.847977] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.854911] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f7222e-2558-671d-a652-da1b7e8da383, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.865557] env[65503]: DEBUG oslo_vmware.api [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449880, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.965444] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449876, 'name': Rename_Task, 'duration_secs': 1.257403} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.965730] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 773.966072] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42376338-2396-45bc-9230-cf0445113dea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.976091] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 773.976091] env[65503]: value = "task-4449881" [ 773.976091] env[65503]: _type = "Task" [ 773.976091] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.991642] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449881, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.095892] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 774.097070] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 774.160311] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff78910-dd19-4118-a7e8-85615a7d5ffa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.174531] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3623a2fb-69b7-430e-a6f3-4c1e459cb0bc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.187898] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449879, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078161} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.216851] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 774.217884] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9467960e-439c-4f72-ae59-7ee85c6d3a0f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.221258] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef617091-de06-4826-9a1a-ee15f432dd92 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.246658] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] ff256d3f-af88-4f01-bdfd-cf89e06ab364/ff256d3f-af88-4f01-bdfd-cf89e06ab364.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 774.250254] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3ce6c2d-4940-4414-8815-67ab87d8e445 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.265176] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68d88f9-3199-4cf9-bac9-ddef6863ec5b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.282266] env[65503]: DEBUG nova.compute.provider_tree [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.285433] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 774.285433] env[65503]: value = "task-4449882" [ 774.285433] env[65503]: _type = "Task" [ 774.285433] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.296454] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449882, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.354464] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f7222e-2558-671d-a652-da1b7e8da383, 'name': SearchDatastore_Task, 'duration_secs': 0.015349} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.359251] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64da23bc-698e-4d23-8918-d98030c4e5bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.368411] env[65503]: DEBUG oslo_vmware.api [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449880, 'name': PowerOffVM_Task, 'duration_secs': 0.380114} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.369944] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 774.370143] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 774.370467] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 774.370467] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cd1ab9-0bec-9e4c-eb27-dd11c27a5652" [ 774.370467] env[65503]: _type = "Task" [ 774.370467] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.370717] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df24643b-04ac-4719-9135-e5ec7f7b0946 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.382476] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cd1ab9-0bec-9e4c-eb27-dd11c27a5652, 'name': SearchDatastore_Task, 'duration_secs': 0.010713} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.382768] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.383057] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] a22f589e-7c40-4023-9a4c-9ab2a76faa94/a22f589e-7c40-4023-9a4c-9ab2a76faa94.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 774.383343] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0461ac84-23f7-4f18-838a-6c782cbdd524 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.391681] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 774.391681] env[65503]: value = "task-4449884" [ 774.391681] env[65503]: _type = "Task" [ 774.391681] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.402643] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449884, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.459509] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 774.482536] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 774.482893] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 774.483113] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Deleting the datastore file [datastore2] 5cefb589-9947-4fc1-89b4-d888f8c8f644 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.484278] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63a4ce49-9d35-4720-abde-ce4d61102214 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.495652] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449881, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.502855] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 774.502855] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 774.502855] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 
774.502855] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 774.502855] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 774.502855] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 774.502855] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 774.502855] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 774.502855] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 774.502855] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 774.502855] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 774.503562] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759154be-628f-4f2c-a10b-076d3bee3b02 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.507339] env[65503]: DEBUG oslo_vmware.api [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for the task: (returnval){ [ 774.507339] env[65503]: value = "task-4449885" [ 774.507339] env[65503]: _type = "Task" [ 774.507339] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.520205] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6938846-3980-43fd-a922-0c495727a7ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.532029] env[65503]: DEBUG oslo_vmware.api [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449885, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.791970] env[65503]: DEBUG nova.scheduler.client.report [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 774.805071] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449882, 'name': ReconfigVM_Task, 'duration_secs': 0.336916} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.805582] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Reconfigured VM instance instance-0000002f to attach disk [datastore1] ff256d3f-af88-4f01-bdfd-cf89e06ab364/ff256d3f-af88-4f01-bdfd-cf89e06ab364.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.806665] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6fa8d011-a27b-47dd-953a-4d4af2df6a88 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.817957] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 774.817957] env[65503]: value = "task-4449886" [ 774.817957] env[65503]: _type = "Task" [ 774.817957] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.829869] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449886, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.851680] env[65503]: WARNING openstack [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 774.854849] env[65503]: WARNING openstack [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 774.897027] env[65503]: WARNING openstack [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 774.897027] env[65503]: WARNING openstack [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 774.922491] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449884, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.949805] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 774.950979] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 774.955184] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 774.990847] env[65503]: DEBUG oslo_vmware.api [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449881, 'name': PowerOnVM_Task, 'duration_secs': 0.802132} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.991382] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 774.991924] env[65503]: DEBUG nova.compute.manager [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 774.993071] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2603e71-2168-4515-8fdd-1918ea319db4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.021919] env[65503]: DEBUG oslo_vmware.api [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Task: {'id': task-4449885, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.43972} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.022317] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 775.022762] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 775.022948] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 775.023156] env[65503]: INFO nova.compute.manager [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Took 1.21 seconds to destroy the instance on the hypervisor. [ 775.023426] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 775.028939] env[65503]: DEBUG nova.compute.manager [-] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 775.029113] env[65503]: DEBUG nova.network.neutron [-] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 775.029431] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 775.030060] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 775.030414] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 775.232840] env[65503]: WARNING neutronclient.v2_0.client [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 775.233725] env[65503]: WARNING openstack [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 775.234707] env[65503]: WARNING openstack [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 775.295202] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 775.299044] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.899s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.299632] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 775.303110] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Successfully updated port: 4fb9999c-b567-4158-9058-b0c36232af7c {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 775.304295] env[65503]: DEBUG oslo_concurrency.lockutils [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.929s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.304514] env[65503]: DEBUG nova.objects.instance [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Lazy-loading 'resources' on Instance uuid 34008711-b51b-467b-b972-bfda1023d696 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 775.324722] env[65503]: WARNING neutronclient.v2_0.client [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 775.325416] env[65503]: WARNING openstack [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 775.325763] env[65503]: WARNING openstack [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 775.346190] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449886, 'name': Rename_Task, 'duration_secs': 0.289636} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.349122] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Updating instance_info_cache with network_info: [{"id": "f1edfbc1-d9c4-460c-90ae-43d46d3fe56a", "address": "fa:16:3e:47:b3:eb", "network": {"id": "d0d104fb-1236-45a4-8656-cb3579433b28", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1564882891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0823ed6443dd4d85937ab2f08f476c8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1edfbc1-d9", "ovs_interfaceid": "f1edfbc1-d9c4-460c-90ae-43d46d3fe56a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 775.350598] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.351832] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d587cfc4-2644-4bfb-a4a1-5280f1db5b9c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.363229] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 
tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 775.363229] env[65503]: value = "task-4449887" [ 775.363229] env[65503]: _type = "Task" [ 775.363229] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.373794] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.416670] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449884, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.601146} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.416946] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] a22f589e-7c40-4023-9a4c-9ab2a76faa94/a22f589e-7c40-4023-9a4c-9ab2a76faa94.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 775.417281] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 775.417530] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7fd4717a-dc53-4db0-a1a3-c8116ddcf3c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.427989] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 775.427989] env[65503]: value = "task-4449888" [ 775.427989] env[65503]: _type = "Task" [ 775.427989] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.443643] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449888, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.531126] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.610320] env[65503]: DEBUG nova.network.neutron [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Updated VIF entry in instance network info cache for port 6e3dc22f-5c81-48e2-9afd-c567f7010a9b. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 775.610764] env[65503]: DEBUG nova.network.neutron [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Updating instance_info_cache with network_info: [{"id": "6e3dc22f-5c81-48e2-9afd-c567f7010a9b", "address": "fa:16:3e:2f:24:0b", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3dc22f-5c", "ovs_interfaceid": "6e3dc22f-5c81-48e2-9afd-c567f7010a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 775.707680] env[65503]: DEBUG nova.network.neutron [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updated VIF entry in instance network info cache for port 7dcf2e2a-4e77-459c-9936-568c34f49a33. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 775.707905] env[65503]: DEBUG nova.network.neutron [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updating instance_info_cache with network_info: [{"id": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "address": "fa:16:3e:4a:dc:36", "network": {"id": "d86429bf-deca-464e-a1ca-79eafee9ebd1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-267775540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf44cd2ee0e4906bcdc3d16dfe7c838", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcf2e2a-4e", "ovs_interfaceid": "7dcf2e2a-4e77-459c-9936-568c34f49a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 775.714619] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.714619] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.812081] env[65503]: DEBUG nova.compute.utils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 775.814934] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "refresh_cache-f840b178-fd54-4c84-808c-a14c99a5ecdd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.814934] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired lock "refresh_cache-f840b178-fd54-4c84-808c-a14c99a5ecdd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.814934] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 
tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 775.820972] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 775.821359] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 775.821765] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 775.823039] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 775.825815] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 775.825815] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 775.839784] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 775.840136] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: 
oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 775.853948] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Releasing lock "refresh_cache-1bda7a65-0231-4753-9762-43e9b13bd893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.853948] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Instance network_info: |[{"id": "f1edfbc1-d9c4-460c-90ae-43d46d3fe56a", "address": "fa:16:3e:47:b3:eb", "network": {"id": "d0d104fb-1236-45a4-8656-cb3579433b28", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1564882891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0823ed6443dd4d85937ab2f08f476c8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1edfbc1-d9", "ovs_interfaceid": "f1edfbc1-d9c4-460c-90ae-43d46d3fe56a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 775.854763] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:b3:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3836fb52-19c6-4e10-a0ca-f0bca73dc887', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1edfbc1-d9c4-460c-90ae-43d46d3fe56a', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 775.866080] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Creating folder: Project (0823ed6443dd4d85937ab2f08f476c8d). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 775.866984] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd97916c-977b-48b7-8b33-8f162a20a5a0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.884186] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449887, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.894124] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Created folder: Project (0823ed6443dd4d85937ab2f08f476c8d) in parent group-v870190. [ 775.894505] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Creating folder: Instances. Parent ref: group-v870334. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 775.894794] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-438f91ee-72b2-483d-a4a3-e14e2285696f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.909026] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Created folder: Instances in parent group-v870334. [ 775.909522] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 775.909667] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 775.909918] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89e06526-bf63-4bcb-9a98-14c12a108c88 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.940020] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 775.940020] env[65503]: value = "task-4449891" [ 775.940020] env[65503]: _type = "Task" [ 775.940020] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.943469] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449888, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14502} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.947059] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 775.948220] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 775.951398] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc0744f-a7a6-4d7f-881d-b09063967326 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.965480] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449891, 'name': CreateVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.986636] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] a22f589e-7c40-4023-9a4c-9ab2a76faa94/a22f589e-7c40-4023-9a4c-9ab2a76faa94.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 775.989826] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6db843eb-0734-4033-b289-c0b4442a661f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.008567] env[65503]: DEBUG nova.policy [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7f57b27af8e943a5b9d7f49193a01c44', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0823ed6443dd4d85937ab2f08f476c8d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 776.020141] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 776.020141] env[65503]: value = "task-4449892" [ 776.020141] env[65503]: _type = "Task" [ 776.020141] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.035557] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449892, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.041389] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 776.044287] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 776.116229] env[65503]: DEBUG oslo_concurrency.lockutils [req-132474a8-084d-4775-a4fc-6c40862bfb9f req-f530823b-d2a5-4c0e-83ff-77268bbc8208 service nova] Releasing lock "refresh_cache-a22f589e-7c40-4023-9a4c-9ab2a76faa94" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.216242] env[65503]: DEBUG oslo_concurrency.lockutils [req-ca24b3e3-4699-440c-a2e3-36cdef082282 req-21e54ea1-34b2-487d-960e-b4b4187ae35e service nova] Releasing lock "refresh_cache-0c0c6d3e-f4d2-458f-aa69-19f87a37f162" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.239415] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.239716] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.239781] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.240664] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.240855] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.241055] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.243887] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 776.243887] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.244164] env[65503]: DEBUG nova.network.neutron [-] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 776.317467] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 776.378646] env[65503]: DEBUG oslo_vmware.api [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4449887, 'name': PowerOnVM_Task, 'duration_secs': 0.557821} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.378646] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 776.378772] env[65503]: INFO nova.compute.manager [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Took 10.76 seconds to spawn the instance on the hypervisor. 
[ 776.378882] env[65503]: DEBUG nova.compute.manager [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 776.380054] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2f1e64-8999-4415-b460-77f1c0e7a3d1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.416822] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Successfully created port: 264a9149-95a0-4c4c-89d4-578b30882bcb {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 776.455016] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449891, 'name': CreateVM_Task, 'duration_secs': 0.426067} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.455462] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 776.456808] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 776.456808] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.456808] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.456808] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 776.457033] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81b21cfa-7bd8-4d5d-a52a-24e1373af8c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.462564] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 776.462564] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bfb71f-e71a-f948-b835-19f7e6ff9242" [ 776.462564] env[65503]: _type = "Task" [ 776.462564] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.474867] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bfb71f-e71a-f948-b835-19f7e6ff9242, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.531171] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449892, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.574687] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd37143-69ca-49ae-8002-db35ceb1e487 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.583567] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a3c341-cc4e-422b-93f8-85a67be0ef7c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.617374] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e477269-9b34-451a-a541-d859ef91bd70 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.625563] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986d9d6b-ee20-4d30-868a-9c08b3fa7470 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.640433] env[65503]: DEBUG nova.compute.provider_tree [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.702084] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 776.702084] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 776.702223] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 776.746364] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.748554] env[65503]: INFO nova.compute.manager [-] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Took 1.72 seconds to deallocate network for instance. 
[ 776.904192] env[65503]: INFO nova.compute.manager [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Took 29.49 seconds to build instance. [ 776.984860] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bfb71f-e71a-f948-b835-19f7e6ff9242, 'name': SearchDatastore_Task, 'duration_secs': 0.028096} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.985390] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.985463] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 776.986038] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.986038] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.986038] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.986238] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d48cdc0-ddf8-432e-82cc-658efcf09491 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.000288] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 777.000525] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 777.005815] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1552cc52-7af5-4665-8927-879d5485e6ee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.014554] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 777.014554] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ab34ef-aed1-efc9-e071-19ee60aa9323" [ 777.014554] env[65503]: _type = "Task" [ 777.014554] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.026296] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ab34ef-aed1-efc9-e071-19ee60aa9323, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.036424] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449892, 'name': ReconfigVM_Task, 'duration_secs': 0.773582} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.036526] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Reconfigured VM instance instance-00000030 to attach disk [datastore1] a22f589e-7c40-4023-9a4c-9ab2a76faa94/a22f589e-7c40-4023-9a4c-9ab2a76faa94.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 777.037361] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ce7ddec-9c19-48ca-b3c0-9fe54fdc309f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.045036] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 777.045036] env[65503]: value = "task-4449893" [ 777.045036] env[65503]: _type = "Task" [ 777.045036] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.057024] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449893, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.144268] env[65503]: DEBUG nova.scheduler.client.report [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 777.215611] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Updating instance_info_cache with network_info: [{"id": "4fb9999c-b567-4158-9058-b0c36232af7c", "address": "fa:16:3e:ef:81:9a", "network": {"id": "d0d104fb-1236-45a4-8656-cb3579433b28", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1564882891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0823ed6443dd4d85937ab2f08f476c8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb9999c-b5", "ovs_interfaceid": "4fb9999c-b567-4158-9058-b0c36232af7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 777.258379] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.329014] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 777.364531] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 777.364803] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 777.364973] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 777.365399] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 777.365613] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 777.365885] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 777.368060] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 777.368060] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 777.368060] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 777.368060] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 777.368060] env[65503]: DEBUG nova.virt.hardware [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 777.369648] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0332e9af-19b2-4d2c-a18e-a49340d7493e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.379878] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b03ae2-f1c4-4353-bd1e-4b19460ca141 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.406565] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4b6630cd-99f8-427a-aa7d-8e7495c0e4ea tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.003s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.528038] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ab34ef-aed1-efc9-e071-19ee60aa9323, 'name': SearchDatastore_Task, 'duration_secs': 0.029724} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.529247] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c38e0886-fadf-4871-8bea-13b36ff09383 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.536715] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 777.536715] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52682325-578a-7bbe-7880-b99aa695db43" [ 777.536715] env[65503]: _type = "Task" [ 777.536715] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.547692] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52682325-578a-7bbe-7880-b99aa695db43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.557806] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449893, 'name': Rename_Task, 'duration_secs': 0.1818} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.558105] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 777.558405] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4e4467f-05fa-42d4-8d67-fe502d6d22c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.566312] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 777.566312] env[65503]: value = "task-4449894" [ 777.566312] env[65503]: _type = "Task" [ 777.566312] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.576712] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449894, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.650667] env[65503]: DEBUG oslo_concurrency.lockutils [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.346s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.654371] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 21.908s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.688015] env[65503]: INFO nova.scheduler.client.report [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Deleted allocations for instance 34008711-b51b-467b-b972-bfda1023d696 [ 777.722751] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Releasing lock "refresh_cache-f840b178-fd54-4c84-808c-a14c99a5ecdd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.723159] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Instance network_info: |[{"id": "4fb9999c-b567-4158-9058-b0c36232af7c", "address": "fa:16:3e:ef:81:9a", "network": {"id": "d0d104fb-1236-45a4-8656-cb3579433b28", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1564882891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0823ed6443dd4d85937ab2f08f476c8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb9999c-b5", "ovs_interfaceid": "4fb9999c-b567-4158-9058-b0c36232af7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 777.723743] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:81:9a', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '3836fb52-19c6-4e10-a0ca-f0bca73dc887', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4fb9999c-b567-4158-9058-b0c36232af7c', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.731507] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 777.731771] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.732027] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d2c09ac-06e0-49d8-ac88-dd83ce43dc3d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.757656] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.757656] env[65503]: value = "task-4449895" [ 777.757656] env[65503]: _type = "Task" [ 777.757656] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.768669] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449895, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.056261] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52682325-578a-7bbe-7880-b99aa695db43, 'name': SearchDatastore_Task, 'duration_secs': 0.014376} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.056673] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.057167] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 1bda7a65-0231-4753-9762-43e9b13bd893/1bda7a65-0231-4753-9762-43e9b13bd893.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 778.058654] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7d82d32-ada2-4251-9931-27f4ab40c099 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.077126] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 778.077126] env[65503]: value = "task-4449896" [ 778.077126] env[65503]: _type = "Task" [ 778.077126] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.086752] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449894, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.093399] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449896, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.159231] env[65503]: INFO nova.compute.claims [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.169516] env[65503]: DEBUG nova.compute.manager [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Received event network-changed-0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 778.169771] env[65503]: DEBUG nova.compute.manager [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Refreshing instance network info cache due to event network-changed-0edc90ad-4b80-4fad-8456-06f696d9756a. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 778.174025] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] Acquiring lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.174025] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] Acquired lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.174025] env[65503]: DEBUG nova.network.neutron [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Refreshing network info cache for port 0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 778.199417] env[65503]: DEBUG oslo_concurrency.lockutils [None req-049fd56b-9397-4b4a-8e47-4a4c77ac0645 tempest-ServersNegativeTestMultiTenantJSON-976867500 tempest-ServersNegativeTestMultiTenantJSON-976867500-project-member] Lock "34008711-b51b-467b-b972-bfda1023d696" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.472s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.203903] env[65503]: DEBUG nova.compute.manager [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Received event network-vif-plugged-f1edfbc1-d9c4-460c-90ae-43d46d3fe56a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 778.203903] env[65503]: DEBUG oslo_concurrency.lockutils [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Acquiring lock "1bda7a65-0231-4753-9762-43e9b13bd893-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.203903] env[65503]: DEBUG oslo_concurrency.lockutils [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Lock "1bda7a65-0231-4753-9762-43e9b13bd893-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.203903] env[65503]: DEBUG oslo_concurrency.lockutils [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Lock "1bda7a65-0231-4753-9762-43e9b13bd893-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.204306] env[65503]: DEBUG nova.compute.manager [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] No waiting events found dispatching network-vif-plugged-f1edfbc1-d9c4-460c-90ae-43d46d3fe56a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 778.204306] 
env[65503]: WARNING nova.compute.manager [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Received unexpected event network-vif-plugged-f1edfbc1-d9c4-460c-90ae-43d46d3fe56a for instance with vm_state building and task_state spawning. [ 778.204380] env[65503]: DEBUG nova.compute.manager [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Received event network-changed-f1edfbc1-d9c4-460c-90ae-43d46d3fe56a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 778.205033] env[65503]: DEBUG nova.compute.manager [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Refreshing instance network info cache due to event network-changed-f1edfbc1-d9c4-460c-90ae-43d46d3fe56a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 778.205033] env[65503]: DEBUG oslo_concurrency.lockutils [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Acquiring lock "refresh_cache-1bda7a65-0231-4753-9762-43e9b13bd893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.205033] env[65503]: DEBUG oslo_concurrency.lockutils [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Acquired lock "refresh_cache-1bda7a65-0231-4753-9762-43e9b13bd893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.205033] env[65503]: DEBUG nova.network.neutron [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Refreshing network info cache for port f1edfbc1-d9c4-460c-90ae-43d46d3fe56a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 778.221301] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Successfully updated port: 264a9149-95a0-4c4c-89d4-578b30882bcb {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 778.270868] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449895, 'name': CreateVM_Task, 'duration_secs': 0.442688} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.271184] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 778.271705] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 778.272184] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.272339] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.272655] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 778.273487] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90c9aa0b-7fbc-4ca2-916b-0587f99ef6a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.281258] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 778.281258] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f8fd3f-6db7-afce-feb1-c334d259d4cf" [ 778.281258] env[65503]: _type = "Task" [ 778.281258] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.291502] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f8fd3f-6db7-afce-feb1-c334d259d4cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.494030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquiring lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.494030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.494030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquiring lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.494030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.494030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.497464] env[65503]: INFO nova.compute.manager [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Terminating instance [ 778.579302] env[65503]: DEBUG oslo_vmware.api [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449894, 'name': PowerOnVM_Task, 'duration_secs': 0.715489} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.586686] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 778.586927] env[65503]: INFO nova.compute.manager [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Took 9.63 seconds to spawn the instance on the hypervisor. [ 778.587241] env[65503]: DEBUG nova.compute.manager [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 778.588600] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a27fdea-ee6d-4f25-9255-7edcdd124a03 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.599176] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449896, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.670341] env[65503]: INFO nova.compute.resource_tracker [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating resource usage from migration 6e2d81c5-8374-425a-bd6e-e2603e1be940 [ 778.684050] env[65503]: WARNING neutronclient.v2_0.client [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 778.684050] env[65503]: WARNING openstack [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 778.684050] env[65503]: WARNING openstack [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 778.708769] env[65503]: WARNING neutronclient.v2_0.client [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 778.711157] env[65503]: WARNING openstack [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 778.711880] env[65503]: WARNING openstack [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 778.729043] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "refresh_cache-62a18449-7cec-4785-a340-d0450adc8044" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.729265] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired lock "refresh_cache-62a18449-7cec-4785-a340-d0450adc8044" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.729430] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 778.802168] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquiring lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.802482] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.803299] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquiring lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.803299] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 
tempest-ServersTestFqdnHostnames-1231859803-project-member] Lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.803625] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.805773] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f8fd3f-6db7-afce-feb1-c334d259d4cf, 'name': SearchDatastore_Task, 'duration_secs': 0.01118} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.805773] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.805773] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 778.805925] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.806051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.806327] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 778.807240] env[65503]: INFO nova.compute.manager [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 
tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Terminating instance [ 778.810835] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c76f34a-d824-4330-b25e-c9a4b247d1b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.822937] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.823225] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 778.827302] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e489d709-609e-425f-b359-60dd285df7aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.845654] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 778.845654] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52994bcc-0e56-5033-2675-635f5ee0f2d3" [ 778.845654] env[65503]: _type = "Task" [ 778.845654] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.856104] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52994bcc-0e56-5033-2675-635f5ee0f2d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.864554] env[65503]: WARNING openstack [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 778.865085] env[65503]: WARNING openstack [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 778.881325] env[65503]: WARNING openstack [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 778.881737] env[65503]: WARNING openstack [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 778.936200] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "1c598208-a4d0-46b8-9a9c-107353e957b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.937052] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "1c598208-a4d0-46b8-9a9c-107353e957b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.937052] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "1c598208-a4d0-46b8-9a9c-107353e957b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.937052] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "1c598208-a4d0-46b8-9a9c-107353e957b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.937052] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "1c598208-a4d0-46b8-9a9c-107353e957b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.939339] env[65503]: INFO nova.compute.manager [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Terminating instance [ 778.951764] env[65503]: WARNING neutronclient.v2_0.client [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 778.952449] env[65503]: WARNING openstack [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 778.952742] env[65503]: WARNING openstack [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 778.998125] env[65503]: WARNING neutronclient.v2_0.client [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 778.999589] env[65503]: WARNING openstack [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 778.999589] env[65503]: WARNING openstack [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 779.008811] env[65503]: DEBUG nova.compute.manager [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 779.009168] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 779.013015] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ceaa715-1b7f-4142-96c4-9ff6761862e7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.021488] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 779.021868] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90e4219f-a66f-4f6c-b923-e363dc1fd9ab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.033644] env[65503]: DEBUG oslo_vmware.api [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 779.033644] env[65503]: value = "task-4449897" [ 779.033644] env[65503]: _type = "Task" [ 779.033644] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.042697] env[65503]: DEBUG oslo_vmware.api [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449897, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.096216] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449896, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.636047} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.096647] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 1bda7a65-0231-4753-9762-43e9b13bd893/1bda7a65-0231-4753-9762-43e9b13bd893.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 779.096880] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 779.097347] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-760b6e01-0ff5-4dd9-ac5b-74a0c1a8c790 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.107600] env[65503]: DEBUG nova.network.neutron [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updated VIF entry in instance network info cache for port 0edc90ad-4b80-4fad-8456-06f696d9756a. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 779.108164] env[65503]: DEBUG nova.network.neutron [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updating instance_info_cache with network_info: [{"id": "0edc90ad-4b80-4fad-8456-06f696d9756a", "address": "fa:16:3e:37:71:1c", "network": {"id": "9b0ccd37-b92a-4bb9-bd80-6ac034d3105c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2254fd86b74662975d3ad1fa4b0f74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0edc90ad-4b", "ovs_interfaceid": "0edc90ad-4b80-4fad-8456-06f696d9756a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 779.122400] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 779.122400] env[65503]: value = "task-4449898" [ 779.122400] env[65503]: _type = "Task" [ 
779.122400] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.131835] env[65503]: INFO nova.compute.manager [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Took 31.30 seconds to build instance. [ 779.151377] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449898, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.197387] env[65503]: DEBUG nova.network.neutron [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Updated VIF entry in instance network info cache for port f1edfbc1-d9c4-460c-90ae-43d46d3fe56a. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 779.197746] env[65503]: DEBUG nova.network.neutron [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Updating instance_info_cache with network_info: [{"id": "f1edfbc1-d9c4-460c-90ae-43d46d3fe56a", "address": "fa:16:3e:47:b3:eb", "network": {"id": "d0d104fb-1236-45a4-8656-cb3579433b28", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1564882891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0823ed6443dd4d85937ab2f08f476c8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1edfbc1-d9", "ovs_interfaceid": "f1edfbc1-d9c4-460c-90ae-43d46d3fe56a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 779.236229] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 779.236588] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group 
[barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 779.286520] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 779.315882] env[65503]: DEBUG nova.compute.manager [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 779.316120] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 779.317618] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 779.317974] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 779.326259] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752e6bb8-4f4b-4b4c-918f-2ea2beab5126 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.339523] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 779.339901] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c872adf7-1806-487d-b9a9-c6a1480545d0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.348520] env[65503]: DEBUG oslo_vmware.api [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for the task: (returnval){ [ 779.348520] env[65503]: value = "task-4449899" [ 779.348520] env[65503]: _type = "Task" [ 779.348520] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.366981] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52994bcc-0e56-5033-2675-635f5ee0f2d3, 'name': SearchDatastore_Task, 'duration_secs': 0.033153} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.375815] env[65503]: DEBUG oslo_vmware.api [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449899, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.378950] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e25cdeb4-6dcf-4e08-b8f1-9aa5f410197d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.386027] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 779.386027] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52598c20-5a6d-671e-40fe-695b9bb35da3" [ 779.386027] env[65503]: _type = "Task" [ 779.386027] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.395676] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52598c20-5a6d-671e-40fe-695b9bb35da3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.408829] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 779.409143] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 779.409480] env[65503]: WARNING openstack [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 779.448397] env[65503]: DEBUG nova.compute.manager [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 779.448397] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 779.449152] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a3c128-47c4-4866-a16e-3fce13d63742 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.459889] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 779.459889] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a441450a-10ab-483d-bed1-7058d5db016e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.469282] env[65503]: DEBUG oslo_vmware.api [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 779.469282] env[65503]: value = "task-4449900" [ 779.469282] env[65503]: _type = "Task" [ 779.469282] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.482019] env[65503]: DEBUG oslo_vmware.api [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449900, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.486525] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3576bd-4d86-42b5-a7f9-8f696b9a2e1e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.496156] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a71e273-b79f-4249-8e96-23aa8225ba3b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.533605] env[65503]: DEBUG nova.network.neutron [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Updating instance_info_cache with network_info: [{"id": "264a9149-95a0-4c4c-89d4-578b30882bcb", "address": "fa:16:3e:f2:6a:0e", "network": {"id": "d0d104fb-1236-45a4-8656-cb3579433b28", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1564882891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0823ed6443dd4d85937ab2f08f476c8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap264a9149-95", "ovs_interfaceid": "264a9149-95a0-4c4c-89d4-578b30882bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 779.537119] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c1768e-3f35-4185-8238-320557ad4157 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.560086] env[65503]: DEBUG oslo_vmware.api [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449897, 'name': PowerOffVM_Task, 'duration_secs': 0.240244} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.561519] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9245bf1-eb88-40d8-bf71-90e73dd75eeb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.566338] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 779.566598] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 779.567274] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84647fb2-d43a-44b8-b86b-e5afececf319 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.583623] env[65503]: DEBUG nova.compute.provider_tree [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.626700] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] Releasing lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.629493] env[65503]: DEBUG nova.compute.manager [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Received event network-vif-deleted-59bb5f92-c2ee-45d1-bbc8-c6c625de0ebc {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 779.629493] env[65503]: DEBUG nova.compute.manager [req-cd32d66c-227f-496a-b522-8de75a63b001 req-617f2871-981b-4c0d-b8cc-a845d88d8b47 service nova] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Received event network-vif-deleted-e646f6df-8d32-4092-a47b-63baf184da39 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 779.644708] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0ac72052-31ba-4ec3-8f1d-ca2779424a6a tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.831s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.645196] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449898, 'name': ExtendVirtualDisk_Task, 
'duration_secs': 0.095708} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.645520] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 779.646598] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da13169c-67fc-4050-9122-7743b518159b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.678163] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 1bda7a65-0231-4753-9762-43e9b13bd893/1bda7a65-0231-4753-9762-43e9b13bd893.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.680128] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab3e7738-58e0-4548-b3e2-e5c11f99ab5d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.695628] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 779.695727] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 779.695914] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Deleting the datastore file [datastore2] 0c0c6d3e-f4d2-458f-aa69-19f87a37f162 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 779.696203] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02b7679a-025e-4e80-8185-8ff0d1d24d31 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.700442] env[65503]: DEBUG oslo_concurrency.lockutils [req-286b2f3c-a974-460e-9e5f-6eb45e77ee61 req-3bac0973-31ae-413f-a48b-fac9c5b6b0af service nova] Releasing lock "refresh_cache-1bda7a65-0231-4753-9762-43e9b13bd893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.705280] env[65503]: DEBUG oslo_vmware.api [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 
tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for the task: (returnval){ [ 779.705280] env[65503]: value = "task-4449902" [ 779.705280] env[65503]: _type = "Task" [ 779.705280] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.707042] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 779.707042] env[65503]: value = "task-4449903" [ 779.707042] env[65503]: _type = "Task" [ 779.707042] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.726164] env[65503]: DEBUG oslo_vmware.api [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.726366] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449903, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.866924] env[65503]: DEBUG oslo_vmware.api [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449899, 'name': PowerOffVM_Task, 'duration_secs': 0.258824} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.867183] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 779.867339] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 779.867619] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc5040e4-62bd-4ec2-a544-99178c058164 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.906852] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52598c20-5a6d-671e-40fe-695b9bb35da3, 'name': SearchDatastore_Task, 'duration_secs': 0.020634} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.907300] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.907610] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f840b178-fd54-4c84-808c-a14c99a5ecdd/f840b178-fd54-4c84-808c-a14c99a5ecdd.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 779.907931] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ccc784d-c826-46ea-98d5-02fd9d617f11 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.918437] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 779.918437] env[65503]: value = "task-4449905" [ 779.918437] env[65503]: _type = "Task" [ 779.918437] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.927746] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449905, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.947790] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 779.947790] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 779.947790] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Deleting the datastore file [datastore1] f7b81948-c480-47a4-9d0f-5c2c163bd7f2 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 779.953773] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89bb1f2c-8fcf-4a6a-b896-fb7f93566d28 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.957177] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "bdbae548-eefc-4e59-8053-f4b8e232580d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.957177] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bdbae548-eefc-4e59-8053-f4b8e232580d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.957998] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.958206] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.967818] env[65503]: DEBUG oslo_vmware.api [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 
tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for the task: (returnval){ [ 779.967818] env[65503]: value = "task-4449906" [ 779.967818] env[65503]: _type = "Task" [ 779.967818] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.991318] env[65503]: DEBUG oslo_vmware.api [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449900, 'name': PowerOffVM_Task, 'duration_secs': 0.237135} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.991675] env[65503]: DEBUG oslo_vmware.api [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.992111] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 779.992407] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 779.992821] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f941ad34-4691-4933-a344-db7a91b55894 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.042808] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Releasing lock "refresh_cache-62a18449-7cec-4785-a340-d0450adc8044" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.043276] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Instance network_info: |[{"id": "264a9149-95a0-4c4c-89d4-578b30882bcb", "address": "fa:16:3e:f2:6a:0e", "network": {"id": "d0d104fb-1236-45a4-8656-cb3579433b28", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1564882891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0823ed6443dd4d85937ab2f08f476c8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap264a9149-95", "ovs_interfaceid": "264a9149-95a0-4c4c-89d4-578b30882bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 780.043795] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:6a:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3836fb52-19c6-4e10-a0ca-f0bca73dc887', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '264a9149-95a0-4c4c-89d4-578b30882bcb', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 780.053619] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 780.053691] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 780.056797] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e2c662f-e3bd-4aea-bd0c-6dab764c4317 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.073518] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 780.073851] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 780.074233] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Deleting the datastore file [datastore2] 1c598208-a4d0-46b8-9a9c-107353e957b9 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 780.075051] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9fdf141-f0ea-4b5d-b357-947515d2081a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.088677] env[65503]: DEBUG oslo_vmware.api [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f 
tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 780.088677] env[65503]: value = "task-4449908" [ 780.088677] env[65503]: _type = "Task" [ 780.088677] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.095155] env[65503]: DEBUG nova.scheduler.client.report [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.095155] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 780.095155] env[65503]: value = "task-4449909" [ 780.095155] env[65503]: _type = "Task" [ 780.095155] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.108023] env[65503]: DEBUG oslo_vmware.api [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449908, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.114778] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449909, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.224537] env[65503]: DEBUG oslo_vmware.api [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Task: {'id': task-4449902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.312577} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.229436] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 780.229436] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 780.229436] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 780.229917] env[65503]: INFO nova.compute.manager [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Took 1.22 seconds to destroy the instance on the hypervisor. [ 780.229917] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 780.230101] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449903, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.230428] env[65503]: DEBUG nova.compute.manager [-] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 780.230544] env[65503]: DEBUG nova.network.neutron [-] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 780.230872] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 780.231476] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 780.231735] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 780.390766] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 780.433503] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449905, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.461689] env[65503]: DEBUG nova.compute.manager [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 780.465330] env[65503]: DEBUG nova.compute.manager [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 780.481833] env[65503]: DEBUG oslo_vmware.api [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Task: {'id': task-4449906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285903} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.482370] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 780.482370] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 780.482559] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 780.482823] env[65503]: INFO nova.compute.manager [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Took 1.17 seconds to destroy the instance on the hypervisor. [ 780.483254] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 780.483449] env[65503]: DEBUG nova.compute.manager [-] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 780.483556] env[65503]: DEBUG nova.network.neutron [-] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 780.483849] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 780.484622] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 780.484967] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 780.596240] env[65503]: DEBUG oslo_vmware.api [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449908, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.607803] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.954s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.608128] env[65503]: INFO nova.compute.manager [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Migrating [ 780.615598] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449909, 'name': CreateVM_Task, 'duration_secs': 0.478659} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.619030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.307s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.619281] env[65503]: DEBUG nova.objects.instance [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Lazy-loading 'resources' on Instance uuid 7ed036d1-8188-4aab-9d6d-8d7e46147812 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 780.620510] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 780.626407] env[65503]: WARNING neutronclient.v2_0.client [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 780.626783] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.627034] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.627259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 780.628284] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f62276da-2f7d-4eb4-884a-00f31965f59c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.637302] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 780.637302] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ea9882-b646-6e5f-2dd1-1c637648ffa6" [ 780.637302] env[65503]: _type = "Task" [ 780.637302] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.651443] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ea9882-b646-6e5f-2dd1-1c637648ffa6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.705581] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 780.726026] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449903, 'name': ReconfigVM_Task, 'duration_secs': 0.608756} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.726026] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 1bda7a65-0231-4753-9762-43e9b13bd893/1bda7a65-0231-4753-9762-43e9b13bd893.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 780.726026] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-633c3a0e-c72e-422e-88b5-20dc56015efa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.736240] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 780.736240] env[65503]: value = "task-4449910" [ 780.736240] env[65503]: _type = "Task" [ 780.736240] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.747811] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449910, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.933094] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449905, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.634886} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.933469] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f840b178-fd54-4c84-808c-a14c99a5ecdd/f840b178-fd54-4c84-808c-a14c99a5ecdd.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 780.933611] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 780.933871] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba0774d7-9b13-4ba2-9537-2745e063ae6f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.944045] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 780.944045] env[65503]: value = "task-4449911" [ 780.944045] env[65503]: _type = "Task" [ 780.944045] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.956318] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449911, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.000559] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.001787] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.097861] env[65503]: DEBUG oslo_vmware.api [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4449908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.641366} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.098223] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.098508] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 781.098747] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 781.098978] env[65503]: INFO nova.compute.manager [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Took 1.65 seconds to destroy the instance on the hypervisor. [ 781.099306] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 781.099562] env[65503]: DEBUG nova.compute.manager [-] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 781.099708] env[65503]: DEBUG nova.network.neutron [-] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 781.100064] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 781.100622] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 781.101078] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 781.130104] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.130987] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.130987] env[65503]: DEBUG nova.network.neutron [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 781.150782] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ea9882-b646-6e5f-2dd1-1c637648ffa6, 'name': SearchDatastore_Task, 'duration_secs': 0.043068} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.150782] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.151133] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 781.151194] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.151418] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.151493] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.152861] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c177202-a99f-4efd-a59d-639ea9dfaf9d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.166852] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.166852] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 781.166852] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10251515-22c1-4d6e-a252-7e93f04829d6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.173580] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 781.173580] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cabc37-4789-d720-6081-e1befc6e1455" [ 781.173580] env[65503]: _type = "Task" [ 781.173580] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.189695] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cabc37-4789-d720-6081-e1befc6e1455, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.245618] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449910, 'name': Rename_Task, 'duration_secs': 0.270192} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.248529] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 781.249022] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cdd4718c-d776-4cec-bf4c-87d4d1f690d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.260268] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 781.260268] env[65503]: value = "task-4449912" [ 781.260268] env[65503]: _type = "Task" [ 781.260268] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.273338] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.353035] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 781.464053] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449911, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123869} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.464053] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 781.465033] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4563fa0-594e-409e-84e1-0fb277c5f4ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.488782] env[65503]: DEBUG nova.network.neutron [-] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 781.500431] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] f840b178-fd54-4c84-808c-a14c99a5ecdd/f840b178-fd54-4c84-808c-a14c99a5ecdd.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 781.507758] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fe842fd-f5be-4a06-bdb7-e0a1d9af78d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.535409] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 781.535409] env[65503]: value = "task-4449913" [ 781.535409] env[65503]: _type = "Task" [ 781.535409] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.547163] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449913, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.635680] env[65503]: WARNING neutronclient.v2_0.client [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 781.636923] env[65503]: WARNING openstack [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 781.636923] env[65503]: WARNING openstack [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 781.692900] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cabc37-4789-d720-6081-e1befc6e1455, 'name': SearchDatastore_Task, 'duration_secs': 0.020401} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.692900] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8adc809-cc6c-47e7-84d9-efff83bc2b8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.699487] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 781.699487] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]522133f3-7e2f-31d5-e43a-a38b5eaa0464" [ 781.699487] env[65503]: _type = "Task" [ 781.699487] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.712156] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522133f3-7e2f-31d5-e43a-a38b5eaa0464, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.771222] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449912, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.855934] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c584e3-b657-4a9b-85d4-d06f9983ad06 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.867042] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676a30bd-f819-4d6d-a9c6-734bbc995ed3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.908159] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33874fbe-7eba-4af7-991e-5d45db7d0bfb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.917417] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef623c8-86b8-4f11-a816-ae04b84661f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.933594] env[65503]: DEBUG nova.compute.provider_tree [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.988128] env[65503]: WARNING openstack [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 781.988128] env[65503]: WARNING openstack [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 782.001946] env[65503]: INFO nova.compute.manager [-] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Took 1.77 seconds to deallocate network for instance. [ 782.002281] env[65503]: DEBUG nova.network.neutron [-] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 782.050314] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449913, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.071489] env[65503]: WARNING neutronclient.v2_0.client [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 782.072115] env[65503]: WARNING openstack [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 782.072568] env[65503]: WARNING openstack [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 782.214967] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522133f3-7e2f-31d5-e43a-a38b5eaa0464, 'name': SearchDatastore_Task, 'duration_secs': 0.013648} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.215404] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.217579] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 62a18449-7cec-4785-a340-d0450adc8044/62a18449-7cec-4785-a340-d0450adc8044.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 782.217774] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0531e8a1-6006-4226-a57d-6f19b86fdd1b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.227394] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 782.227394] env[65503]: value = "task-4449914" [ 782.227394] env[65503]: _type = "Task" [ 782.227394] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.240114] env[65503]: DEBUG nova.network.neutron [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance_info_cache with network_info: [{"id": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "address": "fa:16:3e:b6:69:8c", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633bd812-c5", "ovs_interfaceid": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 782.246663] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449914, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.271368] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449912, 'name': PowerOnVM_Task, 'duration_secs': 0.996453} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.271368] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 782.271368] env[65503]: INFO nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Took 10.55 seconds to spawn the instance on the hypervisor. 
[ 782.271368] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 782.272264] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32d336d-a88c-4685-aeeb-95aef551adec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.283747] env[65503]: DEBUG nova.network.neutron [-] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 782.437485] env[65503]: DEBUG nova.scheduler.client.report [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.506596] env[65503]: INFO nova.compute.manager [-] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Took 2.02 seconds to deallocate network for instance. [ 782.513571] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.551289] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449913, 'name': ReconfigVM_Task, 'duration_secs': 0.683596} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.552074] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Reconfigured VM instance instance-00000032 to attach disk [datastore2] f840b178-fd54-4c84-808c-a14c99a5ecdd/f840b178-fd54-4c84-808c-a14c99a5ecdd.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 782.552903] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f512d57e-f18b-465b-a270-9d1a32f3bde5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.562616] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 782.562616] env[65503]: value = "task-4449915" [ 782.562616] env[65503]: _type = "Task" [ 782.562616] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.575638] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449915, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.588171] env[65503]: INFO nova.compute.manager [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Rebuilding instance [ 782.662460] env[65503]: DEBUG nova.compute.manager [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 782.663452] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b163a688-af7e-4757-8b99-5231862b7ac0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.747927] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.748465] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.750201] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449914, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.750942] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.793931] env[65503]: INFO nova.compute.manager [-] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Took 1.69 seconds to deallocate network for instance. [ 782.800644] env[65503]: INFO nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Took 33.71 seconds to build instance. [ 782.943827] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.325s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.946441] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.320s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.948072] env[65503]: INFO nova.compute.claims [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.970297] env[65503]: INFO nova.scheduler.client.report [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Deleted allocations for instance 7ed036d1-8188-4aab-9d6d-8d7e46147812 [ 783.017471] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.073765] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449915, 'name': Rename_Task, 'duration_secs': 0.27622} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.074096] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 783.074506] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d393711-53c2-44a3-95f5-f669d0b1e4e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.082669] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 783.082669] env[65503]: value = "task-4449916" [ 783.082669] env[65503]: _type = "Task" [ 783.082669] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.092998] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449916, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.176897] env[65503]: DEBUG nova.compute.manager [req-5e6b50ec-9dd9-4dac-a3bd-4496379767de req-ebcf8bac-9aa1-476a-93c9-3e1c12c60b9d service nova] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Received event network-vif-deleted-4fd37874-1199-4205-b01b-28c18959441c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 783.224191] env[65503]: DEBUG nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Received event network-vif-plugged-4fb9999c-b567-4158-9058-b0c36232af7c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 783.224191] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Acquiring lock "f840b178-fd54-4c84-808c-a14c99a5ecdd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.224484] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Lock "f840b178-fd54-4c84-808c-a14c99a5ecdd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.224609] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Lock "f840b178-fd54-4c84-808c-a14c99a5ecdd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.224855] env[65503]: DEBUG 
nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] No waiting events found dispatching network-vif-plugged-4fb9999c-b567-4158-9058-b0c36232af7c {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 783.225100] env[65503]: WARNING nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Received unexpected event network-vif-plugged-4fb9999c-b567-4158-9058-b0c36232af7c for instance with vm_state building and task_state spawning. [ 783.225344] env[65503]: DEBUG nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Received event network-changed-4fb9999c-b567-4158-9058-b0c36232af7c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 783.225569] env[65503]: DEBUG nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Refreshing instance network info cache due to event network-changed-4fb9999c-b567-4158-9058-b0c36232af7c. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 783.225864] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Acquiring lock "refresh_cache-f840b178-fd54-4c84-808c-a14c99a5ecdd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.226061] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Acquired lock "refresh_cache-f840b178-fd54-4c84-808c-a14c99a5ecdd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.226393] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Refreshing network info cache for port 4fb9999c-b567-4158-9058-b0c36232af7c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 783.240864] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449914, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608359} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.241145] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 62a18449-7cec-4785-a340-d0450adc8044/62a18449-7cec-4785-a340-d0450adc8044.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 783.241484] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 783.242263] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a35490ed-65b3-484e-8f67-bb02d9defe49 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.253285] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 783.253285] env[65503]: value = "task-4449917" [ 783.253285] env[65503]: _type = "Task" [ 783.253285] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.253285] env[65503]: DEBUG nova.compute.manager [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 783.280533] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449917, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.305821] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "1bda7a65-0231-4753-9762-43e9b13bd893" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.227s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.306912] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.480120] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a986d2-56bf-4e0b-ae7c-fabba1fe6f62 tempest-InstanceActionsV221TestJSON-470792591 tempest-InstanceActionsV221TestJSON-470792591-project-member] Lock "7ed036d1-8188-4aab-9d6d-8d7e46147812" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.584s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.598439] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449916, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.682881] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 783.683288] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f91476b-4e5e-4654-ab98-8b0b787b9e78 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.691660] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 783.691660] env[65503]: value = "task-4449918" [ 783.691660] env[65503]: _type = "Task" [ 783.691660] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.703194] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449918, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.734669] env[65503]: WARNING neutronclient.v2_0.client [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 783.738604] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 783.739556] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 783.767094] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449917, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074205} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.770630] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 783.780705] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f472a3-31dc-4222-8f9d-3d4330a70014 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.824506] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 62a18449-7cec-4785-a340-d0450adc8044/62a18449-7cec-4785-a340-d0450adc8044.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 783.825338] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.826285] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-533adc9f-1e3f-4da5-a9cd-016057628d9c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.853045] env[65503]: DEBUG 
oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 783.853045] env[65503]: value = "task-4449919" [ 783.853045] env[65503]: _type = "Task" [ 783.853045] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.868435] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449919, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.106334] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449916, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.208848] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449918, 'name': PowerOffVM_Task, 'duration_secs': 0.408859} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.214761] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 784.215626] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 784.216358] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf7c643-5607-4556-8df3-0587b943eb35 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.228614] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 784.228741] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-515339f7-6a67-4c32-b8d4-12ac67a9548a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.257262] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option 
valid_interfaces in group [cinder] [ 784.257737] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 784.302331] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ee7238-eb1b-4a2d-aae8-35b7262d50ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.309620] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 784.309620] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 784.309620] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleting the datastore file [datastore1] ad85eef0-cef7-4900-b193-1737a6c2f17b {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 784.309924] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0573229c-e732-4726-a73e-f254a9d83740 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.328017] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance '9809fc8b-3842-4ce3-bb63-8ea37ee3bf51' progress to 0 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 784.346669] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 784.346669] env[65503]: value = "task-4449921" [ 784.346669] env[65503]: _type = "Task" [ 784.346669] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.362271] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449921, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.371916] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449919, 'name': ReconfigVM_Task, 'duration_secs': 0.406708} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.372289] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 62a18449-7cec-4785-a340-d0450adc8044/62a18449-7cec-4785-a340-d0450adc8044.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 784.373012] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1da270a-1c2f-46fe-a3eb-cb3700e1990b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.383705] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 784.383705] env[65503]: value = "task-4449922" [ 784.383705] env[65503]: _type = "Task" [ 784.383705] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.395341] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449922, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.415043] env[65503]: WARNING neutronclient.v2_0.client [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 784.416435] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 784.416435] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 784.600667] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449916, 'name': PowerOnVM_Task, 'duration_secs': 1.069309} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.601033] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 784.601620] env[65503]: INFO nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Took 10.14 seconds to spawn the instance on the hypervisor. 
[ 784.601839] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 784.602658] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3cde86-8239-40b6-9c61-052bb80069a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.683982] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118efee9-fbe4-45d9-84e9-4be29c0a8cf2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.696744] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6194d9e4-9706-4df7-a0a1-f727afc54ab0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.737049] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09317a6e-ade3-4ac0-9ecd-e286e97565c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.745739] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e81818-3ece-4325-93f8-c47798db3c0f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.753821] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Updated VIF entry in instance network info cache for port 4fb9999c-b567-4158-9058-b0c36232af7c. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 784.754295] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Updating instance_info_cache with network_info: [{"id": "4fb9999c-b567-4158-9058-b0c36232af7c", "address": "fa:16:3e:ef:81:9a", "network": {"id": "d0d104fb-1236-45a4-8656-cb3579433b28", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1564882891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0823ed6443dd4d85937ab2f08f476c8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb9999c-b5", "ovs_interfaceid": "4fb9999c-b567-4158-9058-b0c36232af7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 784.770932] env[65503]: DEBUG nova.compute.provider_tree [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.843089] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 784.843497] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24db8263-0663-4844-9056-26741f4000c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.853181] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 784.853181] env[65503]: value = "task-4449923" [ 784.853181] env[65503]: _type = "Task" [ 784.853181] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.863585] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.503837} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.864172] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 784.864416] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 784.866743] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.874216] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.896201] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449922, 'name': Rename_Task, 'duration_secs': 0.154396} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.896499] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 784.896761] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bbf07ef8-a8b0-4b53-88ff-760791a9df39 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.905728] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 784.905728] env[65503]: value = "task-4449924" [ 784.905728] env[65503]: _type = "Task" [ 784.905728] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.917722] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449924, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.127283] env[65503]: INFO nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Took 35.92 seconds to build instance. [ 785.259145] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Releasing lock "refresh_cache-f840b178-fd54-4c84-808c-a14c99a5ecdd" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.259437] env[65503]: DEBUG nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Received event network-changed-0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 785.259578] env[65503]: DEBUG nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Refreshing instance network info cache due to event network-changed-0edc90ad-4b80-4fad-8456-06f696d9756a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 785.259877] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Acquiring lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.260149] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Acquired lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.260328] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Refreshing network info cache for port 0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 785.276041] env[65503]: DEBUG nova.scheduler.client.report [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 785.368198] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449923, 'name': PowerOffVM_Task, 'duration_secs': 0.221108} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.368527] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 785.368719] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance '9809fc8b-3842-4ce3-bb63-8ea37ee3bf51' progress to 17 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 785.416855] env[65503]: DEBUG oslo_vmware.api [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449924, 'name': PowerOnVM_Task, 'duration_secs': 0.51137} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.417157] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 785.417348] env[65503]: INFO nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Took 8.09 seconds to spawn the instance on the hypervisor. [ 785.417873] env[65503]: DEBUG nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 785.418712] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b51a00-ff70-46e6-9a31-d22ed2c7238e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.629056] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "f840b178-fd54-4c84-808c-a14c99a5ecdd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.429s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.766144] env[65503]: WARNING neutronclient.v2_0.client [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 785.766144] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 785.766144] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 785.783865] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.835s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.783865] env[65503]: DEBUG nova.compute.manager [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 785.785603] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.781s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.786123] env[65503]: DEBUG nova.objects.instance [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Lazy-loading 'resources' on Instance uuid 8f0bf665-b21b-42ed-816d-69dee2f40654 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 785.877663] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 785.877978] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 785.878172] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 785.878386] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 785.878545] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 785.878729] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 785.878962] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 785.879575] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 785.879785] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 785.880042] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 785.880406] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 785.889426] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-775ec8d2-f1c2-4d46-8957-67e28995e8b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.912978] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 785.912978] env[65503]: value = "task-4449925" [ 785.912978] env[65503]: _type = "Task" [ 785.912978] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.922563] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449925, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.943187] env[65503]: INFO nova.compute.manager [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Took 36.14 seconds to build instance. [ 785.953499] env[65503]: DEBUG nova.virt.hardware [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 785.954011] env[65503]: DEBUG nova.virt.hardware [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 785.954011] env[65503]: DEBUG nova.virt.hardware [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 785.954135] env[65503]: DEBUG nova.virt.hardware [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 785.954274] env[65503]: DEBUG nova.virt.hardware [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 785.955261] env[65503]: DEBUG nova.virt.hardware [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 785.955261] env[65503]: DEBUG nova.virt.hardware [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 785.955261] env[65503]: DEBUG nova.virt.hardware [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 785.955261] env[65503]: DEBUG nova.virt.hardware [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 785.955261] env[65503]: DEBUG nova.virt.hardware [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 785.956459] env[65503]: DEBUG nova.virt.hardware [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 785.957313] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c216ad-af8d-4ddb-96c3-334224e991b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.970075] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 785.972592] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 785.985052] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01706cd4-5bbc-4ed9-a4a9-5aa4a0a843fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.005673] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:c2:35', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'eaba65c3-6925-4c7f-83b6-17cd1a328e27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6164c166-4054-4e4a-93fb-6e84abe74f7d', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.021609] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 786.027466] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 786.028180] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-716e6bd9-2cff-49d8-97d9-2761c458c95e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.051587] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 786.051587] env[65503]: value = "task-4449926" [ 786.051587] env[65503]: _type = "Task" [ 786.051587] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.061392] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449926, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.084206] env[65503]: WARNING neutronclient.v2_0.client [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 786.085442] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 786.085738] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 786.236299] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "972a50ed-759a-4312-9314-9bf01a03fc3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.236566] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "972a50ed-759a-4312-9314-9bf01a03fc3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.284530] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updated VIF entry in instance network info cache for port 0edc90ad-4b80-4fad-8456-06f696d9756a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 786.284934] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updating instance_info_cache with network_info: [{"id": "0edc90ad-4b80-4fad-8456-06f696d9756a", "address": "fa:16:3e:37:71:1c", "network": {"id": "9b0ccd37-b92a-4bb9-bd80-6ac034d3105c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2254fd86b74662975d3ad1fa4b0f74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0edc90ad-4b", "ovs_interfaceid": "0edc90ad-4b80-4fad-8456-06f696d9756a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 786.290295] env[65503]: DEBUG nova.compute.utils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 786.295016] env[65503]: DEBUG nova.compute.manager [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 786.295311] env[65503]: DEBUG nova.network.neutron [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 786.295674] env[65503]: WARNING neutronclient.v2_0.client [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 786.296078] env[65503]: WARNING neutronclient.v2_0.client [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 786.299120] env[65503]: WARNING openstack [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 786.299120] env[65503]: WARNING openstack [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 786.378923] env[65503]: DEBUG nova.policy [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e193d8d730e14c348b38c407f58cdc56', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34e8cd66745a40d2acebbce98050ee5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 786.428185] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449925, 'name': ReconfigVM_Task, 'duration_secs': 0.304193} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.429550] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance '9809fc8b-3842-4ce3-bb63-8ea37ee3bf51' progress to 33 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 786.434383] env[65503]: DEBUG nova.compute.manager [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Received event network-vif-deleted-7dcf2e2a-4e77-459c-9936-568c34f49a33 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 786.435585] env[65503]: DEBUG nova.compute.manager [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Received event network-vif-deleted-ab13f109-bf67-4d27-b8ff-d9a97f6dbc0a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 786.435818] env[65503]: DEBUG nova.compute.manager [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Received event network-changed-d53b3aea-2563-4099-a577-bb623eb1e83e {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 786.435989] env[65503]: DEBUG nova.compute.manager [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Refreshing instance network info cache due to event network-changed-d53b3aea-2563-4099-a577-bb623eb1e83e. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 786.436239] env[65503]: DEBUG oslo_concurrency.lockutils [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] Acquiring lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.436424] env[65503]: DEBUG oslo_concurrency.lockutils [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] Acquired lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.436628] env[65503]: DEBUG nova.network.neutron [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Refreshing network info cache for port d53b3aea-2563-4099-a577-bb623eb1e83e {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 786.449806] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e9c18750-9e25-4058-89b5-7b5f2f3e2408 tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "62a18449-7cec-4785-a340-d0450adc8044" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.194s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.451023] env[65503]: DEBUG nova.compute.manager [req-1cd70305-2310-44f3-a9eb-735f4d1bea73 req-7944b396-980c-4832-af0d-c461fcab3562 service nova] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Received event network-vif-deleted-9f691f3d-5247-46d4-b46b-6840c2cc557d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 786.469941] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.469941] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.469941] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.469941] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 
tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.469941] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.476030] env[65503]: INFO nova.compute.manager [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Terminating instance [ 786.566989] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449926, 'name': CreateVM_Task, 'duration_secs': 0.478737} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.567272] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 786.567963] env[65503]: WARNING neutronclient.v2_0.client [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 786.568400] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.568617] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.569884] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 786.572442] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3e253b4-4307-402c-b790-0eb81e773f71 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.580290] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a 
tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 786.580290] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e00eb2-feb1-6acb-9629-ef5035d96b8e" [ 786.580290] env[65503]: _type = "Task" [ 786.580290] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.589552] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e00eb2-feb1-6acb-9629-ef5035d96b8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.741553] env[65503]: DEBUG nova.compute.manager [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 786.788121] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Releasing lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.789230] env[65503]: DEBUG nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Received event network-changed-d53b3aea-2563-4099-a577-bb623eb1e83e {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 786.789230] env[65503]: DEBUG nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Refreshing instance network info cache due to event network-changed-d53b3aea-2563-4099-a577-bb623eb1e83e. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 786.789230] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Acquiring lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.799025] env[65503]: DEBUG nova.compute.manager [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 786.866044] env[65503]: DEBUG nova.compute.manager [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 786.867347] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445e2499-d645-4946-ad6b-45f53811adcb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.917251] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773114d3-aa24-4e79-a227-277d48d15242 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.926369] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13b184c-0a28-420f-8cc9-7d93d28f5cc7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.962756] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:49:18Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='e416d5a6-7c96-408f-8f4c-2aff52378276',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1101851334',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 786.962983] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 786.963168] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 786.963373] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 786.963598] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 786.963817] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 786.963973] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 786.964144] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 786.964306] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 786.964461] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 786.964631] env[65503]: DEBUG nova.virt.hardware [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 786.969895] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Reconfiguring VM instance instance-00000026 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 786.970517] env[65503]: WARNING neutronclient.v2_0.client [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
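The nova.virt.hardware records above trace how the driver settles on a guest CPU topology for a 1-vCPU flavor: neither the flavor nor the image sets any limits, so the maximums default to 65536 sockets/cores/threads, and the only factorization of one vCPU is 1:1:1, which is why exactly one VirtCPUTopology is reported. The sketch below is illustrative only (it is not Nova's implementation; the namedtuple and defaults are assumptions) and just reproduces that enumeration:

# Illustrative sketch (not Nova's code) of enumerating the possible
# (sockets, cores, threads) splits for a vCPU count, given per-dimension
# maximums that default to 65536 when flavor and image set no limits.
import collections
import itertools

VirtCPUTopology = collections.namedtuple('VirtCPUTopology',
                                         ['sockets', 'cores', 'threads'])

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield every topology whose sockets*cores*threads equals vcpus."""
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield VirtCPUTopology(sockets, cores, threads)

print(list(possible_topologies(1)))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching the single
#    "possible topology" the log reports for the 1-vCPU tempest flavors.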
[ 786.971181] env[65503]: WARNING openstack [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 786.971530] env[65503]: WARNING openstack [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 786.979405] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c16859e2-2308-40f5-a592-5930b3904509 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.994469] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a70fbd-00c6-45cd-9b43-f1d885858016 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.997877] env[65503]: DEBUG nova.compute.manager [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 786.998100] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 786.999147] env[65503]: DEBUG nova.network.neutron [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Successfully created port: b4d33353-d9f7-496e-9fb4-be39d02d940f {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 787.001845] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b5f3d2-d766-4a8b-8083-3eb48ab4c1f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.014582] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70b2201-157c-4388-a83d-4ef31a509a55 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.020295] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 787.020662] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 
tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 787.020662] env[65503]: value = "task-4449927" [ 787.020662] env[65503]: _type = "Task" [ 787.020662] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.021170] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-150ac3a0-4ba2-4953-8a2e-2bbf6289c685 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.037058] env[65503]: DEBUG nova.compute.provider_tree [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.042293] env[65503]: DEBUG oslo_vmware.api [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 787.042293] env[65503]: value = "task-4449928" [ 787.042293] env[65503]: _type = "Task" [ 787.042293] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.046211] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449927, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.056542] env[65503]: DEBUG oslo_vmware.api [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449928, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.092443] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e00eb2-feb1-6acb-9629-ef5035d96b8e, 'name': SearchDatastore_Task, 'duration_secs': 0.017226} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.092443] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.092443] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 787.092719] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.092933] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.093081] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 787.093408] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0007754d-b73c-4081-9db2-d1021ee99d7c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.105848] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 787.106484] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 787.108171] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ee5309b-9cac-4d0e-8d35-58a183bcb330 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.117930] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 787.117930] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5274f5c5-6abd-5673-eb87-e687c7af90bf" [ 787.117930] env[65503]: _type = "Task" [ 787.117930] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.132078] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5274f5c5-6abd-5673-eb87-e687c7af90bf, 'name': SearchDatastore_Task, 'duration_secs': 0.011669} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.132951] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7e55e38-a5a4-41fe-9494-0c5b76ddf644 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.140692] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 787.140692] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528b4caf-3b33-f709-e805-fb8bdde2d0fb" [ 787.140692] env[65503]: _type = "Task" [ 787.140692] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.152660] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528b4caf-3b33-f709-e805-fb8bdde2d0fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.269356] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.381363] env[65503]: INFO nova.compute.manager [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] instance snapshotting [ 787.385060] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec3d84c-8cb4-4cfd-a9a4-4a80c2174506 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.410962] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245a7ca0-a1b2-4aa3-9957-ded99db4aa79 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.536271] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449927, 'name': ReconfigVM_Task, 'duration_secs': 0.231745} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.536588] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Reconfigured VM instance instance-00000026 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 787.537979] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2990494f-7d42-4992-b7ff-2df6c436915a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.540866] env[65503]: DEBUG nova.scheduler.client.report [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 787.570457] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 
9809fc8b-3842-4ce3-bb63-8ea37ee3bf51/9809fc8b-3842-4ce3-bb63-8ea37ee3bf51.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 787.571955] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ba39056-1ed4-4b6d-a203-8e7690c7249d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.588749] env[65503]: DEBUG oslo_vmware.api [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449928, 'name': PowerOffVM_Task, 'duration_secs': 0.25816} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.589570] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 787.590171] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 787.590171] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d8dbde9-9d9e-448e-8d8a-e88cf0a16c28 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.596657] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 787.596657] env[65503]: value = "task-4449929" [ 787.596657] env[65503]: _type = "Task" [ 787.596657] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.608620] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449929, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.656284] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528b4caf-3b33-f709-e805-fb8bdde2d0fb, 'name': SearchDatastore_Task, 'duration_secs': 0.011996} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.656780] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.657201] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.657624] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ed648d9-39d3-46fb-8d34-73970af5ff20 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.668550] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 787.668550] env[65503]: value = "task-4449931" [ 787.668550] env[65503]: _type = "Task" [ 787.668550] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.676999] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 787.677216] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 787.677702] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Deleting the datastore file [datastore2] ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 787.681037] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c073847-90f9-47e4-8233-e2695c1b08a3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.683326] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449931, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.689015] env[65503]: DEBUG oslo_vmware.api [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 787.689015] env[65503]: value = "task-4449932" [ 787.689015] env[65503]: _type = "Task" [ 787.689015] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.698583] env[65503]: DEBUG oslo_vmware.api [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449932, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.808235] env[65503]: DEBUG nova.compute.manager [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 787.838586] env[65503]: DEBUG nova.virt.hardware [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 787.838832] env[65503]: DEBUG nova.virt.hardware [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 787.839013] env[65503]: DEBUG nova.virt.hardware [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 787.839207] env[65503]: DEBUG nova.virt.hardware [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 787.839364] env[65503]: DEBUG nova.virt.hardware [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] 
Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 787.839506] env[65503]: DEBUG nova.virt.hardware [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 787.839834] env[65503]: DEBUG nova.virt.hardware [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 787.840122] env[65503]: DEBUG nova.virt.hardware [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 787.840330] env[65503]: DEBUG nova.virt.hardware [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 787.840506] env[65503]: DEBUG nova.virt.hardware [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 787.840679] env[65503]: DEBUG nova.virt.hardware [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 787.841684] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2342370f-1129-403b-98f2-237438922a1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.851657] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287f9461-6292-42c8-abb3-f64743ef6df7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.925015] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 787.925417] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9f1db615-218e-49e2-b1ef-e081a4c141d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.937849] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e 
tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 787.937849] env[65503]: value = "task-4449933" [ 787.937849] env[65503]: _type = "Task" [ 787.937849] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.952716] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449933, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.047438] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.261s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.051460] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.200s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.051792] env[65503]: DEBUG nova.objects.instance [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lazy-loading 'resources' on Instance uuid 9acbc312-a3a2-4758-87cd-5576c4f1f8dc {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 788.082554] env[65503]: INFO nova.scheduler.client.report [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Deleted allocations for instance 8f0bf665-b21b-42ed-816d-69dee2f40654 [ 788.116348] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449929, 'name': ReconfigVM_Task, 'duration_secs': 0.452956} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.117022] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51/9809fc8b-3842-4ce3-bb63-8ea37ee3bf51.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 788.119170] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance '9809fc8b-3842-4ce3-bb63-8ea37ee3bf51' progress to 50 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 788.183782] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449931, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.202060] env[65503]: DEBUG oslo_vmware.api [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4449932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245995} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.202450] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 788.202687] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 788.202925] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 788.203124] env[65503]: INFO nova.compute.manager [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Took 1.21 seconds to destroy the instance on the hypervisor. 
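The task records in this stretch (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, ReconfigVM_Task, CopyVirtualDisk_Task) all follow the same oslo.vmware pattern: a vSphere method that returns a Task is invoked through the shared VMwareAPISession, and wait_for_task() blocks while the poller emits the "progress is N%" and "completed successfully" lines seen above. A minimal sketch of that pattern, with placeholder connection details and a placeholder managed-object ID rather than anything taken from this log:

# Minimal sketch of the invoke-then-wait pattern behind the repeated
# "Waiting for the task ... progress is N% ... completed successfully" records.
# Host, credentials and the 'vm-12345' moref value are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed-object reference for a VM by its vSphere moref value.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Kick off a task-returning vSphere call and block until it completes;
# wait_for_task() is what drives the periodic progress logging.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)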
[ 788.203468] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 788.204220] env[65503]: DEBUG nova.compute.manager [-] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 788.204318] env[65503]: DEBUG nova.network.neutron [-] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 788.204790] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 788.206275] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 788.206561] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 788.455157] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449933, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.597095] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b3c610cc-f787-44ee-937e-98909cfe7fbc tempest-FloatingIPsAssociationNegativeTestJSON-1604283335 tempest-FloatingIPsAssociationNegativeTestJSON-1604283335-project-member] Lock "8f0bf665-b21b-42ed-816d-69dee2f40654" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.837s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.628070] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c49792b-757c-41ab-a275-a2db892d0608 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.663234] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26244d53-287d-48e0-8f15-1a168ef58a9a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.693501] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance '9809fc8b-3842-4ce3-bb63-8ea37ee3bf51' progress to 67 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 788.709042] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449931, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517602} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.709042] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 788.709042] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.709822] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fcad116-6b30-4417-984c-1b748f485577 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.723045] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 788.723045] env[65503]: value = "task-4449934" [ 788.723045] env[65503]: _type = "Task" [ 788.723045] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.736769] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449934, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.833089] env[65503]: DEBUG nova.network.neutron [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Successfully updated port: b4d33353-d9f7-496e-9fb4-be39d02d940f {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 788.956387] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449933, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.983273] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 789.202703] env[65503]: WARNING neutronclient.v2_0.client [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 789.238126] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449934, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078498} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.241062] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 789.242195] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ac31ab-113e-45ec-ba25-717856b298e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.269319] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 789.272303] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c2b3832-0566-41d9-b13d-946b0bc92f95 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.297919] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 789.297919] env[65503]: value = "task-4449935" [ 789.297919] env[65503]: _type = "Task" [ 789.297919] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.303182] env[65503]: WARNING openstack [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 789.303588] env[65503]: WARNING openstack [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 789.321162] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449935, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.332067] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e6db67-fab7-4486-a8a3-f1e018aa3836 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.344987] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "refresh_cache-a57486e1-82e3-48d5-99fe-c89b300a2136" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.345291] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "refresh_cache-a57486e1-82e3-48d5-99fe-c89b300a2136" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.347196] env[65503]: DEBUG nova.network.neutron [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 789.352029] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965051a0-dab6-4b4a-893a-7e0f1561f924 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.388493] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3882a243-682d-41bb-831e-7f48db098ce0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.397921] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-948c42a0-603a-4899-b8d7-ff3c2f8d4c50 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.413803] env[65503]: DEBUG nova.compute.provider_tree [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.456086] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449933, 'name': CreateSnapshot_Task, 'duration_secs': 1.286074} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.457422] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 789.457854] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c483323d-0f63-4866-adbf-490244136a9e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.736161] env[65503]: DEBUG nova.network.neutron [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Port 633bd812-c51f-4ae0-bab2-ced08b56a04b binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 789.814612] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449935, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.858422] env[65503]: WARNING openstack [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 789.859776] env[65503]: WARNING openstack [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 789.870146] env[65503]: WARNING neutronclient.v2_0.client [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
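The ExtendVirtualDisk_Task / ReconfigVM_Task records above follow oslo.vmware's invoke-then-poll pattern: a vSphere *_Task call returns a task reference, and wait_for_task() polls it until it completes, which is what produces the "_poll_task ... progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, assuming an already-created VMwareAPISession and hypothetical vmdk_path, dc_ref and new_size_kb arguments (not values taken from this log):

    from oslo_vmware import api as vmware_api

    def extend_root_disk(session, vmdk_path, dc_ref, new_size_kb):
        """Extend a VMDK the way the ExtendVirtualDisk_Task records above do."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
            name=vmdk_path,            # e.g. '[datastore2] <uuid>/<uuid>.vmdk'
            datacenter=dc_ref,         # datacenter managed-object reference
            newCapacityKb=new_size_kb,
            eagerZero=False)
        # wait_for_task() is the loop behind the "progress is N%" DEBUG lines.
        session.wait_for_task(task)

    # A session would be built roughly like this (all arguments are placeholders):
    # session = vmware_api.VMwareAPISession(
    #     host='vc.example.test', server_username='user', server_password='secret',
    #     api_retry_count=3, task_poll_interval=0.5)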
[ 789.870801] env[65503]: WARNING openstack [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 789.871500] env[65503]: WARNING openstack [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 789.916714] env[65503]: DEBUG nova.scheduler.client.report [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 789.987090] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 789.988523] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-345e208e-0c10-4aa9-b4bb-616218b2e5a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.999379] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 789.999379] env[65503]: value = "task-4449936" [ 789.999379] env[65503]: _type = "Task" [ 789.999379] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.010548] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449936, 'name': CloneVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.024221] env[65503]: DEBUG nova.network.neutron [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 790.120661] env[65503]: WARNING openstack [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 790.121160] env[65503]: WARNING openstack [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 790.179263] env[65503]: DEBUG nova.network.neutron [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Updated VIF entry in instance network info cache for port d53b3aea-2563-4099-a577-bb623eb1e83e. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 790.179263] env[65503]: DEBUG nova.network.neutron [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Updating instance_info_cache with network_info: [{"id": "d53b3aea-2563-4099-a577-bb623eb1e83e", "address": "fa:16:3e:ea:8f:8c", "network": {"id": "9b0ccd37-b92a-4bb9-bd80-6ac034d3105c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2254fd86b74662975d3ad1fa4b0f74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd53b3aea-25", "ovs_interfaceid": "d53b3aea-2563-4099-a577-bb623eb1e83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 790.199607] env[65503]: DEBUG nova.network.neutron [-] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 790.268620] env[65503]: WARNING neutronclient.v2_0.client [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
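The "Updating instance_info_cache with network_info: [...]" records above dump the instance's network_info as a JSON list of VIF dictionaries (id, MAC address, network with subnets and ips, port binding details). A small sketch for pulling the fixed addresses back out of such a blob, assuming the bracketed text has been captured as a string:

    import json

    def fixed_ips_from_network_info(raw_blob):
        """raw_blob: the [{"id": ...}] text from an instance_info_cache record."""
        vifs = json.loads(raw_blob)
        addresses = []
        for vif in vifs:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    if ip['type'] == 'fixed':
                        addresses.append((vif['id'], ip['address']))
        return addresses

    # For the a57486e1-82e3-48d5-99fe-c89b300a2136 entry above this yields
    # [('b4d33353-d9f7-496e-9fb4-be39d02d940f', '192.168.128.5')].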
[ 790.269530] env[65503]: WARNING openstack [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 790.270969] env[65503]: WARNING openstack [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 790.311405] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449935, 'name': ReconfigVM_Task, 'duration_secs': 0.596279} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.311655] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Reconfigured VM instance instance-00000005 to attach disk [datastore2] ad85eef0-cef7-4900-b193-1737a6c2f17b/ad85eef0-cef7-4900-b193-1737a6c2f17b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 790.312764] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39e974a3-d65d-4a22-bddf-5ea910b0c808 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.322380] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 790.322380] env[65503]: value = "task-4449937" [ 790.322380] env[65503]: _type = "Task" [ 790.322380] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.330997] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449937, 'name': Rename_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.419423] env[65503]: DEBUG nova.network.neutron [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Updating instance_info_cache with network_info: [{"id": "b4d33353-d9f7-496e-9fb4-be39d02d940f", "address": "fa:16:3e:2e:c2:af", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4d33353-d9", "ovs_interfaceid": "b4d33353-d9f7-496e-9fb4-be39d02d940f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 790.421690] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.370s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.428115] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.708s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.429641] env[65503]: INFO nova.compute.claims [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.468931] env[65503]: INFO nova.scheduler.client.report [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted allocations for instance 9acbc312-a3a2-4758-87cd-5576c4f1f8dc [ 790.518208] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449936, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.682365] env[65503]: DEBUG oslo_concurrency.lockutils [req-6e0ee279-053c-4bd2-994c-e35c2cb5ea9f req-cf09ba84-7cd4-47a2-9c14-f81632f49427 service nova] Releasing lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.682910] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Acquired lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.683136] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Refreshing network info cache for port d53b3aea-2563-4099-a577-bb623eb1e83e {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 790.703946] env[65503]: INFO nova.compute.manager [-] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Took 2.50 seconds to deallocate network for instance. [ 790.762501] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.763282] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.763282] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.832692] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449937, 'name': Rename_Task, 'duration_secs': 0.174915} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.834091] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.837340] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20e1556f-036f-443f-9b06-62c1ef1cd1ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.844183] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 790.844183] env[65503]: value = "task-4449938" [ 790.844183] env[65503]: _type = "Task" [ 790.844183] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.857810] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449938, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.928577] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "refresh_cache-a57486e1-82e3-48d5-99fe-c89b300a2136" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.928577] env[65503]: DEBUG nova.compute.manager [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Instance network_info: |[{"id": "b4d33353-d9f7-496e-9fb4-be39d02d940f", "address": "fa:16:3e:2e:c2:af", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4d33353-d9", "ovs_interfaceid": "b4d33353-d9f7-496e-9fb4-be39d02d940f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 790.928903] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:c2:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4d33353-d9f7-496e-9fb4-be39d02d940f', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.937201] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Creating folder: Project (34e8cd66745a40d2acebbce98050ee5d). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.939791] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab048bf0-9335-47a0-9fb1-0295992f7a26 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.953353] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Created folder: Project (34e8cd66745a40d2acebbce98050ee5d) in parent group-v870190. [ 790.953642] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Creating folder: Instances. Parent ref: group-v870342. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.954045] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed49d04e-a7c1-4cbd-8c4c-3038372d32a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.968537] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Created folder: Instances in parent group-v870342. [ 790.968827] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 790.969402] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.969669] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fcca96fa-eebf-4af5-b39b-cbbe266c09b0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.988658] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8000b3b1-1336-401a-98d5-870d216078c5 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9acbc312-a3a2-4758-87cd-5576c4f1f8dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.514s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.996368] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.996368] env[65503]: value = "task-4449941" [ 790.996368] env[65503]: _type = "Task" [ 790.996368] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.012523] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449936, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.016491] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449941, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.187473] env[65503]: WARNING neutronclient.v2_0.client [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
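The recurring "python binding code in neutronclient is deprecated in favor of OpenstackSDK" warning points at openstacksdk as the replacement client. A hedged sketch of the equivalent port lookup through the SDK; the 'devstack' cloud name is a placeholder for an entry in clouds.yaml, and the port id is the one from the records above:

    import openstack

    # Query Neutron through openstacksdk instead of python-neutronclient.
    conn = openstack.connect(cloud='devstack')   # clouds.yaml entry (placeholder)
    port = conn.network.get_port('b4d33353-d9f7-496e-9fb4-be39d02d940f')
    print(port.status, port.fixed_ips)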
[ 791.187473] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 791.187876] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 791.214378] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.282244] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 791.358514] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449938, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.486339] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 791.515131] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449941, 'name': CreateVM_Task, 'duration_secs': 0.431297} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.515336] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 791.515876] env[65503]: WARNING neutronclient.v2_0.client [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
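The lock records that run through this log come from oslo.concurrency. Roughly, the 'Acquiring lock "refresh_cache-..."' / 'Acquired lock' / 'Releasing lock' lines (lockutils.py:313/316/334) correspond to the context-manager form, while the 'Lock "compute_resources" acquired by "..." :: waited N.NNNs' and '"released" ... :: held N.NNNs' lines (lockutils.py:405/410/424) come from the decorator form. Nova takes these locks through its own helpers; the sketch below uses the oslo.concurrency primitives directly, with placeholder names:

    from oslo_concurrency import lockutils

    # Context-manager form: per-instance cache refresh lock, as in the log.
    def refresh_instance_cache(instance_uuid, fetch_nw_info):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return fetch_nw_info(instance_uuid)

    # Decorator form: serialize a resource-tracker style critical section.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass   # placeholder body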
[ 791.516257] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.516406] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.516733] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 791.517323] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c41b805-67ee-4116-9950-b7a227fd4621 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.525657] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449936, 'name': CloneVM_Task} progress is 95%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.534618] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 791.534618] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210c4ed-1387-6176-c619-5ef4c6b9d5ba" [ 791.534618] env[65503]: _type = "Task" [ 791.534618] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.542179] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210c4ed-1387-6176-c619-5ef4c6b9d5ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.779456] env[65503]: WARNING neutronclient.v2_0.client [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 791.862557] env[65503]: DEBUG oslo_vmware.api [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449938, 'name': PowerOnVM_Task, 'duration_secs': 0.578942} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.863807] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.864051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.864398] env[65503]: DEBUG nova.network.neutron [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 791.865748] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.869139] env[65503]: DEBUG nova.compute.manager [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 791.873054] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8286cc22-306a-413b-8b10-5ccac3ca8d0a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.915388] env[65503]: DEBUG nova.compute.manager [req-f6602d78-4661-49e6-9ec7-7d0a264d5544 req-f2ca4df3-86da-4dc1-9540-d801ef62e848 service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Received event network-vif-plugged-b4d33353-d9f7-496e-9fb4-be39d02d940f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 791.915388] env[65503]: DEBUG oslo_concurrency.lockutils [req-f6602d78-4661-49e6-9ec7-7d0a264d5544 req-f2ca4df3-86da-4dc1-9540-d801ef62e848 service nova] Acquiring lock "a57486e1-82e3-48d5-99fe-c89b300a2136-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.915388] env[65503]: DEBUG oslo_concurrency.lockutils [req-f6602d78-4661-49e6-9ec7-7d0a264d5544 req-f2ca4df3-86da-4dc1-9540-d801ef62e848 service nova] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.916396] env[65503]: DEBUG oslo_concurrency.lockutils [req-f6602d78-4661-49e6-9ec7-7d0a264d5544 req-f2ca4df3-86da-4dc1-9540-d801ef62e848 service nova] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136-events" "released" 
by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.916573] env[65503]: DEBUG nova.compute.manager [req-f6602d78-4661-49e6-9ec7-7d0a264d5544 req-f2ca4df3-86da-4dc1-9540-d801ef62e848 service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] No waiting events found dispatching network-vif-plugged-b4d33353-d9f7-496e-9fb4-be39d02d940f {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 791.916797] env[65503]: WARNING nova.compute.manager [req-f6602d78-4661-49e6-9ec7-7d0a264d5544 req-f2ca4df3-86da-4dc1-9540-d801ef62e848 service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Received unexpected event network-vif-plugged-b4d33353-d9f7-496e-9fb4-be39d02d940f for instance with vm_state building and task_state spawning. [ 791.949537] env[65503]: DEBUG nova.compute.manager [req-ade7a51f-6579-493f-ac8d-80890679a252 req-41e823c9-83c5-4664-ae61-3e59330d5566 service nova] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Received event network-vif-deleted-d53b3aea-2563-4099-a577-bb623eb1e83e {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 791.992765] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Releasing lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.993041] env[65503]: DEBUG nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Received event network-vif-plugged-264a9149-95a0-4c4c-89d4-578b30882bcb {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 791.993229] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Acquiring lock "62a18449-7cec-4785-a340-d0450adc8044-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.993421] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Lock "62a18449-7cec-4785-a340-d0450adc8044-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.993583] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Lock "62a18449-7cec-4785-a340-d0450adc8044-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.993740] env[65503]: DEBUG nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] No waiting events found dispatching network-vif-plugged-264a9149-95a0-4c4c-89d4-578b30882bcb {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 791.993899] 
env[65503]: WARNING nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Received unexpected event network-vif-plugged-264a9149-95a0-4c4c-89d4-578b30882bcb for instance with vm_state building and task_state spawning. [ 791.994911] env[65503]: DEBUG nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Received event network-changed-264a9149-95a0-4c4c-89d4-578b30882bcb {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 791.994911] env[65503]: DEBUG nova.compute.manager [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Refreshing instance network info cache due to event network-changed-264a9149-95a0-4c4c-89d4-578b30882bcb. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 791.994911] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Acquiring lock "refresh_cache-62a18449-7cec-4785-a340-d0450adc8044" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.994911] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Acquired lock "refresh_cache-62a18449-7cec-4785-a340-d0450adc8044" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.995893] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Refreshing network info cache for port 264a9149-95a0-4c4c-89d4-578b30882bcb {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 792.019252] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449936, 'name': CloneVM_Task, 'duration_secs': 1.726433} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.019252] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Created linked-clone VM from snapshot [ 792.019252] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bc3b2e-1ba8-4a34-8c54-68d2976aa413 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.028033] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Uploading image 936e1408-7ca9-49c1-8c6c-6b1727d8cb4e {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 792.048283] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210c4ed-1387-6176-c619-5ef4c6b9d5ba, 'name': SearchDatastore_Task, 'duration_secs': 0.012349} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.049511] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.049834] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 792.050121] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.050276] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.050487] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 792.052252] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 792.052540] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e3bc3b1-feae-4f2f-8515-51cbfab35ed2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.054744] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bb802a12-a7f5-4c6e-9e3c-077ee843c2f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.063644] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 792.063644] env[65503]: value = "task-4449942" [ 792.063644] env[65503]: _type = "Task" [ 792.063644] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.065754] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 792.066132] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 792.077210] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b597fc42-ce79-45f0-bf5d-d8eedef8f3f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.088843] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449942, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.091294] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 792.091294] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52835d3e-d7f6-a28b-76b0-dcaccc8274b3" [ 792.091294] env[65503]: _type = "Task" [ 792.091294] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.109173] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52835d3e-d7f6-a28b-76b0-dcaccc8274b3, 'name': SearchDatastore_Task, 'duration_secs': 0.013556} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.110268] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bed55f1d-7838-4367-8157-ac19e33cfa60 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.117898] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 792.117898] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52592812-3872-403e-1a24-828c3a390de4" [ 792.117898] env[65503]: _type = "Task" [ 792.117898] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.137211] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52592812-3872-403e-1a24-828c3a390de4, 'name': SearchDatastore_Task, 'duration_secs': 0.012511} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.137687] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.138016] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] a57486e1-82e3-48d5-99fe-c89b300a2136/a57486e1-82e3-48d5-99fe-c89b300a2136.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.138401] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f6bbe758-8698-4252-9bc0-40d73e6a33da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.148614] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 792.148614] env[65503]: value = "task-4449943" [ 792.148614] env[65503]: _type = "Task" [ 792.148614] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.163920] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449943, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.266130] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4de820-25f0-473b-8f76-3eba5bc059da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.277126] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8d12f6-2582-4687-8653-9bec53432a1f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.321839] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3868e859-a7d3-41ef-afc0-add83d245580 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.330932] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afc4f04-b822-4d54-b8b0-715bc6918b01 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.349667] env[65503]: DEBUG nova.compute.provider_tree [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.371213] env[65503]: WARNING neutronclient.v2_0.client [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
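The repeated "no such option valid_interfaces in group [cinder]/[barbican]" warnings appear to come from openstacksdk reading oslo.config groups in which keystoneauth's adapter options (valid_interfaces among them) were never registered, so it disables those services for its own use. Under that assumption, a hedged sketch of the keystoneauth1 registration step that makes the option exist at all; the group name and the empty argument list are illustrative:

    from keystoneauth1 import loading as ks_loading
    from oslo_config import cfg

    CONF = cfg.CONF
    # Register the session and adapter option sets into the [cinder] group;
    # valid_interfaces is one of the adapter options.
    ks_loading.register_session_conf_options(CONF, 'cinder')
    ks_loading.register_adapter_conf_options(CONF, 'cinder')
    CONF(args=[])                         # parse (empty) CLI args so values can be read
    print(CONF.cinder.valid_interfaces)   # now resolvable instead of NoSuchOptError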
[ 792.371781] env[65503]: WARNING openstack [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 792.373228] env[65503]: WARNING openstack [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 792.398459] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.501637] env[65503]: WARNING neutronclient.v2_0.client [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 792.502530] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 792.503130] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 792.582494] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449942, 'name': Destroy_Task, 'duration_secs': 0.458578} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.582666] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Destroyed the VM [ 792.582894] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 792.583347] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-149356b4-c315-49ed-916f-d34af128c5e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.593029] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 792.593029] env[65503]: value = "task-4449944" [ 792.593029] env[65503]: _type = "Task" [ 792.593029] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.604495] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449944, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.660352] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449943, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.852887] env[65503]: DEBUG nova.scheduler.client.report [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.910697] env[65503]: DEBUG oslo_concurrency.lockutils [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "1bda7a65-0231-4753-9762-43e9b13bd893" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.911091] env[65503]: DEBUG oslo_concurrency.lockutils [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "1bda7a65-0231-4753-9762-43e9b13bd893" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.911401] env[65503]: DEBUG oslo_concurrency.lockutils [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "1bda7a65-0231-4753-9762-43e9b13bd893-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.911676] env[65503]: DEBUG oslo_concurrency.lockutils [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "1bda7a65-0231-4753-9762-43e9b13bd893-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.912483] env[65503]: DEBUG oslo_concurrency.lockutils [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "1bda7a65-0231-4753-9762-43e9b13bd893-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.921484] env[65503]: INFO nova.compute.manager [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Terminating instance [ 792.936737] env[65503]: WARNING openstack [None 
req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 792.937033] env[65503]: WARNING openstack [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 793.105265] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449944, 'name': RemoveSnapshot_Task} progress is 45%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.107858] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 793.108681] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 793.162786] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449943, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55419} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.163150] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] a57486e1-82e3-48d5-99fe-c89b300a2136/a57486e1-82e3-48d5-99fe-c89b300a2136.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.163398] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.163683] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-508b0f77-dfaa-4939-afdb-19ea734ff4a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.174574] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 793.174574] env[65503]: value = "task-4449945" [ 793.174574] env[65503]: _type = "Task" [ 793.174574] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.186175] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449945, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.358956] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.933s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.360021] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 793.362740] env[65503]: DEBUG oslo_concurrency.lockutils [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.025s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.362954] env[65503]: DEBUG nova.objects.instance [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lazy-loading 'resources' on Instance uuid 75578ccd-2b34-4948-9afa-ac94e9fd8b4b {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 793.426524] env[65503]: DEBUG nova.compute.manager [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 793.426596] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 793.427932] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c31aa9-a473-4c6f-b712-87575c99b77c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.439202] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 793.439578] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b74fc0d2-a4ba-49bf-83ee-3d1fb24c58e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.448634] env[65503]: DEBUG oslo_vmware.api [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 793.448634] env[65503]: value = "task-4449946" [ 793.448634] env[65503]: _type = "Task" [ 793.448634] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.460055] env[65503]: DEBUG oslo_vmware.api [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449946, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.605370] env[65503]: DEBUG oslo_vmware.api [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449944, 'name': RemoveSnapshot_Task, 'duration_secs': 0.803328} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.605737] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 793.692353] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449945, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080503} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.700613] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 793.705476] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa3040d-2df2-4357-8c55-96a3aed83db1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.733131] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] a57486e1-82e3-48d5-99fe-c89b300a2136/a57486e1-82e3-48d5-99fe-c89b300a2136.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 793.733131] env[65503]: WARNING neutronclient.v2_0.client [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
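The recurring "Acquiring lock ... / Lock ... acquired ... waited N.NNNs / Lock ... released ... held N.NNNs" triplets in this section (for "compute_resources", the per-instance UUID locks, and so on) are emitted by oslo.concurrency's lockutils whenever a critical section is wrapped in a named lock. A minimal sketch of that usage, with an illustrative function body rather than Nova's actual code:

    from oslo_concurrency import lockutils

    # Serialize work on the resource tracker the same way the entries above
    # show; the decorator's wrapper logs acquire/wait/hold times at DEBUG.
    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid):
        # Runs with the named lock held; concurrent callers block, and their
        # wait time shows up as "waited N.NNNs" in the log.
        pass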
[ 793.733491] env[65503]: WARNING openstack [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 793.733827] env[65503]: WARNING openstack [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 793.745148] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-235f903d-8be0-48c1-b9e1-ae5dbab66883 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.767604] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 793.767604] env[65503]: value = "task-4449947" [ 793.767604] env[65503]: _type = "Task" [ 793.767604] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.780047] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449947, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.869027] env[65503]: DEBUG nova.compute.utils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 793.877623] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 793.878554] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 793.878554] env[65503]: WARNING neutronclient.v2_0.client [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
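The neutronclient deprecation warnings repeated throughout this run point at openstacksdk as the replacement for the Python binding that allocate_for_instance() still uses here. For reference, creating a port directly through the SDK looks roughly like the sketch below; the cloud name and network ID are placeholders, not values from this log.

    import openstack

    # Connect using a cloud entry from clouds.yaml (placeholder name).
    conn = openstack.connect(cloud='devstack')

    # Rough equivalent of the port that allocate_for_instance() requests
    # from Neutron; substitute a real network UUID.
    port = conn.network.create_port(
        network_id='NETWORK_UUID',
        name='tap-example')
    print(port.id, port.status)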
[ 793.879677] env[65503]: WARNING neutronclient.v2_0.client [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 793.879677] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 793.879843] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 793.965964] env[65503]: DEBUG oslo_vmware.api [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449946, 'name': PowerOffVM_Task, 'duration_secs': 0.355386} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.968974] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 793.973025] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 793.973025] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab3b0460-550c-4200-9e52-9afc1bf848e3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.049717] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 794.049951] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 794.050230] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Deleting the datastore file [datastore2] 1bda7a65-0231-4753-9762-43e9b13bd893 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.050518] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5691f1ea-259e-49b4-b965-d773ca57b601 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.059034] env[65503]: DEBUG oslo_vmware.api [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 794.059034] env[65503]: value = "task-4449949" [ 794.059034] env[65503]: _type = "Task" [ 794.059034] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.074461] env[65503]: DEBUG oslo_vmware.api [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449949, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.117846] env[65503]: WARNING nova.compute.manager [None req-478329ba-ee8f-4a66-8a94-ac90e23ae62e tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Image not found during snapshot: nova.exception.ImageNotFound: Image 936e1408-7ca9-49c1-8c6c-6b1727d8cb4e could not be found. [ 794.190133] env[65503]: WARNING neutronclient.v2_0.client [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 794.190836] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 794.191212] env[65503]: WARNING openstack [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 794.278869] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449947, 'name': ReconfigVM_Task, 'duration_secs': 0.484729} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.281137] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Reconfigured VM instance instance-00000034 to attach disk [datastore2] a57486e1-82e3-48d5-99fe-c89b300a2136/a57486e1-82e3-48d5-99fe-c89b300a2136.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 794.285391] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78607426-6203-4a72-a48c-512cf3b690d0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.296089] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 794.296089] env[65503]: value = "task-4449950" [ 794.296089] env[65503]: _type = "Task" [ 794.296089] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.315014] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449950, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.379818] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 794.392733] env[65503]: DEBUG nova.policy [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffa6631776ba4f168d3d2a0168b858e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81064933e6fe4abf9c18f13d18c58037', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 794.536421] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b432529-d487-4ca1-8347-b537057656e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.549031] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47acb73d-8476-40b6-8bc0-91207f8aee04 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.588559] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf90253-4fea-4075-937c-1235b2e94e2c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.603616] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f6b492-cd5c-4d45-837f-68eb159aa0d0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.607580] env[65503]: DEBUG oslo_vmware.api [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4449949, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271715} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.608028] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 794.608118] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 794.608219] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 794.608393] env[65503]: INFO nova.compute.manager [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Took 1.18 seconds to destroy the instance on the hypervisor. [ 794.608639] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 794.609349] env[65503]: DEBUG nova.compute.manager [-] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 794.609506] env[65503]: DEBUG nova.network.neutron [-] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 794.609798] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
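After the VM is unregistered and its datastore files are deleted, the compute manager hands network deallocation to an oslo.service looping call and blocks until it returns, which is where the "Waiting for function ... _deallocate_network_with_retries to return" entry above comes from. A rough sketch of that wait pattern using the fixed-interval variant follows; Nova's actual helper and retry policy may differ, and _deallocate_network here is a placeholder.

    from oslo_service import loopingcall

    def _deallocate_network():
        # Placeholder for the real deallocation work; raising
        # LoopingCallDone stops the loop and passes a result to .wait().
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network)
    # start() returns an event-like object; wait() blocks until the wrapped
    # function signals completion -- the "Waiting for function ... to
    # return" entry in the log.
    result = timer.start(interval=1.0).wait()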
[ 794.610389] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 794.610645] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 794.628646] env[65503]: DEBUG nova.compute.provider_tree [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.646354] env[65503]: DEBUG nova.network.neutron [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance_info_cache with network_info: [{"id": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "address": "fa:16:3e:b6:69:8c", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633bd812-c5", "ovs_interfaceid": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 794.813060] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449950, 'name': Rename_Task, 'duration_secs': 0.344622} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.813362] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 794.813643] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d039d726-cf6d-45e3-85bc-a3cd11d2d92b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.821064] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 794.821064] env[65503]: value = "task-4449951" [ 794.821064] env[65503]: _type = "Task" [ 794.821064] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.833388] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449951, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.993520] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Successfully created port: f956ccee-3465-49d9-8608-0bb9d01afbe6 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 795.042992] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Updated VIF entry in instance network info cache for port 264a9149-95a0-4c4c-89d4-578b30882bcb. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 795.043884] env[65503]: DEBUG nova.network.neutron [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Updating instance_info_cache with network_info: [{"id": "264a9149-95a0-4c4c-89d4-578b30882bcb", "address": "fa:16:3e:f2:6a:0e", "network": {"id": "d0d104fb-1236-45a4-8656-cb3579433b28", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1564882891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823ed6443dd4d85937ab2f08f476c8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap264a9149-95", "ovs_interfaceid": "264a9149-95a0-4c4c-89d4-578b30882bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 795.108716] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 795.134619] env[65503]: DEBUG nova.scheduler.client.report [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 795.148862] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.333572] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449951, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.393029] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 795.422844] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 795.427056] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 795.427056] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 795.427056] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 795.427056] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 795.427056] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 795.427056] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:577}} [ 795.427056] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 795.427056] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 795.427056] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 795.427056] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 795.427056] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d83372-88a9-4e3c-9f56-307c716e53a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.435120] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3652f8-d1a3-4b9c-92d2-4dc273834031 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.546711] env[65503]: DEBUG oslo_concurrency.lockutils [req-c30c5afb-a5b0-4b4d-b16c-0d270f6aaa65 req-1c536013-af85-4c2e-b1b4-cc4398506760 service nova] Releasing lock "refresh_cache-62a18449-7cec-4785-a340-d0450adc8044" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.644491] env[65503]: DEBUG oslo_concurrency.lockutils [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.281s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.648144] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.638s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.650656] env[65503]: INFO nova.compute.claims [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.676085] env[65503]: INFO nova.scheduler.client.report [None req-23afae38-65be-41d9-b584-86b1b3928fed 
tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Deleted allocations for instance 75578ccd-2b34-4948-9afa-ac94e9fd8b4b [ 795.678785] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67e470c-f3b2-4457-850a-320577a61901 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.706537] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae84a77-db65-456a-a79e-8c74f752f0c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.720127] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance '9809fc8b-3842-4ce3-bb63-8ea37ee3bf51' progress to 83 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 795.833322] env[65503]: DEBUG oslo_vmware.api [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4449951, 'name': PowerOnVM_Task, 'duration_secs': 0.983141} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.833595] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 795.833794] env[65503]: INFO nova.compute.manager [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Took 8.03 seconds to spawn the instance on the hypervisor. 
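The nova.virt.hardware entries above show the topology selection for the m1.nano flavor: with 1 vCPU and no flavor or image limits (0:0:0), the only possible and therefore preferred topology is 1 socket x 1 core x 1 thread. A simplified illustration of that enumeration, not Nova's actual implementation (which also weighs preferred topologies and image properties):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product equals
        # the vCPU count and which stay within the limits -- for vcpus=1
        # this yields only (1, 1, 1), matching "Got 1 possible topologies".
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)]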
[ 795.833985] env[65503]: DEBUG nova.compute.manager [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 795.834939] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ae2931-7f4f-47fc-a0f5-24c51b10adc8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.189862] env[65503]: DEBUG oslo_concurrency.lockutils [None req-23afae38-65be-41d9-b584-86b1b3928fed tempest-InstanceActionsTestJSON-1036877389 tempest-InstanceActionsTestJSON-1036877389-project-member] Lock "75578ccd-2b34-4948-9afa-ac94e9fd8b4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.484s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.225784] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 796.226511] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-196c56df-1e1d-4f5b-befb-b6331be7c7ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.240621] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 796.240621] env[65503]: value = "task-4449952" [ 796.240621] env[65503]: _type = "Task" [ 796.240621] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.252185] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449952, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.354698] env[65503]: INFO nova.compute.manager [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Took 36.78 seconds to build instance. [ 796.443643] env[65503]: DEBUG nova.network.neutron [-] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 796.757537] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449952, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.856562] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ccae7f7c-bc20-4424-b814-4d38b084427d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.073s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.947220] env[65503]: INFO nova.compute.manager [-] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Took 2.34 seconds to deallocate network for instance. [ 797.217372] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff457ba5-6216-4e06-ba33-b007e1ae770d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.226311] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5688b45e-3cf5-4419-8d95-33cd9de6f9e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.269257] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Successfully updated port: f956ccee-3465-49d9-8608-0bb9d01afbe6 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 797.275223] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c95916-386c-4761-adc3-fe97276a6bee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.289462] env[65503]: DEBUG oslo_vmware.api [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4449952, 'name': PowerOnVM_Task, 'duration_secs': 0.696179} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.293572] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.293829] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cee181d0-ccb8-41b6-a170-62bb7c4813c3 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance '9809fc8b-3842-4ce3-bb63-8ea37ee3bf51' progress to 100 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 797.305734] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289a94ca-9b0b-4dd9-9799-8717d7b50d05 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.328591] env[65503]: DEBUG nova.compute.provider_tree [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.458278] env[65503]: DEBUG oslo_concurrency.lockutils [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.782194] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "refresh_cache-083797a8-8daf-493b-89de-7ae9137ed538" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.784461] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "refresh_cache-083797a8-8daf-493b-89de-7ae9137ed538" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.784461] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 797.832040] env[65503]: DEBUG nova.scheduler.client.report [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 797.993233] env[65503]: INFO nova.compute.manager [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Rebuilding instance [ 798.048523] env[65503]: DEBUG nova.compute.manager [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 798.049432] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14056df6-3c4d-45d3-9a29-8742123016e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.288934] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 798.289851] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 798.340151] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.340813] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 798.348375] env[65503]: DEBUG oslo_concurrency.lockutils [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.533s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.349739] env[65503]: DEBUG nova.objects.instance [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Lazy-loading 'resources' on Instance uuid ca5962fe-3e41-4fae-8860-90fa7278e0fc {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.376731] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 798.460494] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 798.460927] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 798.637025] env[65503]: WARNING neutronclient.v2_0.client [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
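The inventory the scheduler report client prints a few records earlier (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 200) maps to schedulable capacity via the standard Placement rule (total - reserved) * allocation_ratio. A quick sketch with the numbers copied from the log:

```python
# Effective capacity per resource class, per the Placement capacity formula.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} allocatable")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200
```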
[ 798.637025] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 798.637025] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 798.854394] env[65503]: DEBUG nova.compute.utils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 798.857226] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 798.860050] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 798.860050] env[65503]: WARNING neutronclient.v2_0.client [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 798.860050] env[65503]: WARNING neutronclient.v2_0.client [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
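The repeated "Disabling service 'block-storage' / 'key-manager'" warnings come from the SDK asking oslo.config for an option (valid_interfaces) that was never registered in the [cinder] or [barbican] group. A standalone reproduction of that failure mode is sketched below; it is an isolated oslo.config example, not Nova's or the SDK's actual configuration wiring.

```python
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup("cinder"))  # group exists, option does not
conf([])  # parse an empty command line so options become accessible

try:
    conf.cinder.valid_interfaces  # never registered in [cinder]
except cfg.NoSuchOptError as exc:
    # Mirrors the warning text: "no such option valid_interfaces in group [cinder]"
    print(f"would disable the service: {exc}")
```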
[ 798.860050] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 798.860050] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 799.005337] env[65503]: DEBUG nova.policy [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffa6631776ba4f168d3d2a0168b858e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81064933e6fe4abf9c18f13d18c58037', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 799.066277] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 799.071620] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5be01d8-0ec5-469a-85b6-995ff43cac6b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.082700] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 799.082700] env[65503]: value = "task-4449953" [ 799.082700] env[65503]: _type = "Task" [ 799.082700] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.085212] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Updating instance_info_cache with network_info: [{"id": "f956ccee-3465-49d9-8608-0bb9d01afbe6", "address": "fa:16:3e:9e:89:52", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf956ccee-34", "ovs_interfaceid": "f956ccee-3465-49d9-8608-0bb9d01afbe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 799.106813] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449953, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.377203] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 799.381346] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.382101] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.581687] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Successfully created port: cb078d1e-65b3-4dfd-980e-792978036eb5 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 799.599327] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "refresh_cache-083797a8-8daf-493b-89de-7ae9137ed538" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.599774] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Instance network_info: |[{"id": "f956ccee-3465-49d9-8608-0bb9d01afbe6", "address": "fa:16:3e:9e:89:52", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf956ccee-34", "ovs_interfaceid": "f956ccee-3465-49d9-8608-0bb9d01afbe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 799.600200] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449953, 
'name': PowerOffVM_Task, 'duration_secs': 0.380579} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.604055] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:89:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd891a662-7da0-4be5-8d0d-01b4ff70552a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f956ccee-3465-49d9-8608-0bb9d01afbe6', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 799.613246] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Creating folder: Project (81064933e6fe4abf9c18f13d18c58037). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 799.614037] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 799.614037] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 799.614534] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bda04f59-0d71-4d1d-9efd-05b0737e1255 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.618888] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b641d04-c52c-4be2-bf63-5876bf63174c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.627371] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 799.627741] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98cc5521-4242-4fe1-ab32-14eca6c37ff6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.633057] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Created folder: Project (81064933e6fe4abf9c18f13d18c58037) in parent group-v870190. 
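The instance_info_cache entry logged earlier for port f956ccee-... is an ordinary list of VIF dicts. A trimmed-down copy (abridged from the log) plus plain dict access is enough to pull out the fields the VMware VIF code later consumes, such as the MAC, fixed IP, and MTU; this is only an illustration of the structure, not Nova's own parsing code.

```python
# Abridged from the network_info shown in the log above.
network_info = [{
    "id": "f956ccee-3465-49d9-8608-0bb9d01afbe6",
    "address": "fa:16:3e:9e:89:52",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4}],
        }],
        "meta": {"mtu": 8950},
    },
    "type": "ovs",
    "ovs_interfaceid": "f956ccee-3465-49d9-8608-0bb9d01afbe6",
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips, vif["network"]["meta"]["mtu"])
```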
[ 799.633768] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Creating folder: Instances. Parent ref: group-v870345. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 799.637035] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-facb0cf8-1f95-4b95-a53e-cb4fb9dc55d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.651037] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Created folder: Instances in parent group-v870345. [ 799.651037] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 799.651037] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 799.651341] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64b31c04-83a4-4613-934f-76508910960c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.670956] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3f0769-8211-45d9-b0b2-297173e6ef0d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.684188] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a64bb1-f425-412a-92d9-2e581e01ca0b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.686746] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 799.686746] env[65503]: value = "task-4449957" [ 799.686746] env[65503]: _type = "Task" [ 799.686746] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.725542] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a47a75-1a7c-4908-871a-32d2e4a6be66 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.732764] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449957, 'name': CreateVM_Task} progress is 15%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.734759] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 799.734974] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 799.735169] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleting the datastore file [datastore1] a22f589e-7c40-4023-9a4c-9ab2a76faa94 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 799.738396] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b2ba2a9-c98d-4594-97b9-f35cb717b04b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.742294] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5546bbbf-fec1-4a45-a739-b02d8b3f87fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.749612] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 799.749612] env[65503]: value = "task-4449958" [ 799.749612] env[65503]: _type = "Task" [ 799.749612] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.764293] env[65503]: DEBUG nova.compute.provider_tree [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.770775] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449958, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.889357] env[65503]: DEBUG nova.compute.manager [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 799.995714] env[65503]: DEBUG nova.compute.manager [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Received event network-changed-b4d33353-d9f7-496e-9fb4-be39d02d940f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 799.995714] env[65503]: DEBUG nova.compute.manager [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Refreshing instance network info cache due to event network-changed-b4d33353-d9f7-496e-9fb4-be39d02d940f. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 799.995714] env[65503]: DEBUG oslo_concurrency.lockutils [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] Acquiring lock "refresh_cache-a57486e1-82e3-48d5-99fe-c89b300a2136" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.995714] env[65503]: DEBUG oslo_concurrency.lockutils [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] Acquired lock "refresh_cache-a57486e1-82e3-48d5-99fe-c89b300a2136" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.995954] env[65503]: DEBUG nova.network.neutron [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Refreshing network info cache for port b4d33353-d9f7-496e-9fb4-be39d02d940f {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 800.023039] env[65503]: DEBUG nova.compute.manager [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Received event network-changed-0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 800.023039] env[65503]: DEBUG nova.compute.manager [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Refreshing instance network info cache due to event network-changed-0edc90ad-4b80-4fad-8456-06f696d9756a. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 800.023039] env[65503]: DEBUG oslo_concurrency.lockutils [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] Acquiring lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.023039] env[65503]: DEBUG oslo_concurrency.lockutils [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] Acquired lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.023039] env[65503]: DEBUG nova.network.neutron [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Refreshing network info cache for port 0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 800.200199] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449957, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.263316] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449958, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.395047} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.263603] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 800.263781] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 800.263949] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 800.271203] env[65503]: DEBUG nova.scheduler.client.report [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 800.397797] 
env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 800.421807] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.427838] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 800.428076] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 800.428234] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 800.428445] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 800.428583] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 800.428718] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 800.428916] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 
tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 800.429071] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 800.429232] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 800.429385] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 800.429544] env[65503]: DEBUG nova.virt.hardware [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 800.430186] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "4c6d9dca-6b62-41ca-a033-2fa9d8d86f69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.430392] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "4c6d9dca-6b62-41ca-a033-2fa9d8d86f69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.431985] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c18e76-6dfc-4a66-92e4-9bf8c0a256d9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.443555] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84054b0-1365-41a3-98a6-49dd96369483 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.503376] env[65503]: WARNING neutronclient.v2_0.client [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
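The hardware lines just above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") enumerate every (sockets, cores, threads) layout that multiplies out to the flavor's vCPU count within the allowed maxima. The sketch below shows that core idea in a simplified form; it is not the actual algorithm in nova/virt/hardware.py, and the small limits in the demo are chosen only to keep the search tiny.

```python
import itertools

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Return every (sockets, cores, threads) triple whose product is vcpus."""
    found = []
    for sockets, cores, threads in itertools.product(
            range(1, max_sockets + 1),
            range(1, max_cores + 1),
            range(1, max_threads + 1)):
        if sockets * cores * threads == vcpus:
            found.append((sockets, cores, threads))
    return found

# With 1 vCPU only 1:1:1 fits, matching the single topology in the log.
print(possible_topologies(1, 8, 8, 2))   # [(1, 1, 1)]
print(possible_topologies(4, 2, 4, 2))   # several candidate layouts
```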
[ 800.504115] env[65503]: WARNING openstack [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 800.504724] env[65503]: WARNING openstack [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 800.526116] env[65503]: WARNING neutronclient.v2_0.client [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 800.527357] env[65503]: WARNING openstack [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 800.527357] env[65503]: WARNING openstack [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 800.655072] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.655359] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.655561] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.655735] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 
tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.655895] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.659664] env[65503]: INFO nova.compute.manager [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Terminating instance [ 800.700124] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449957, 'name': CreateVM_Task, 'duration_secs': 0.546711} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.700321] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 800.700839] env[65503]: WARNING neutronclient.v2_0.client [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
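The "Acquiring lock ... acquired ... released" triplets around terminate_instance and the per-instance "-events" lock come from oslo.concurrency serializing critical sections by lock name and logging the waited/held times. A minimal sketch of that pattern follows; the function and lock names here are placeholders, not Nova's real call sites.

```python
from oslo_concurrency import lockutils

@lockutils.synchronized("demo-instance-uuid")
def do_terminate():
    # Runs with the named lock held; a concurrent caller blocks here and
    # its "waited N.NNNs" shows up in the DEBUG output.
    print("terminating while holding the instance lock")

def clear_events():
    # Context-manager form used for short critical sections like the
    # per-instance "-events" lock above.
    with lockutils.lock("demo-instance-uuid-events"):
        print("clearing pending events under the -events lock")

do_terminate()
clear_events()
```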
[ 800.701277] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.701435] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.701741] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 800.702153] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-609490d1-bd41-4b54-8ed1-bc34db079640 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.708902] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 800.708902] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cb3a47-65e4-d98c-837f-5f5d927bf115" [ 800.708902] env[65503]: _type = "Task" [ 800.708902] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.718531] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cb3a47-65e4-d98c-837f-5f5d927bf115, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.776571] env[65503]: DEBUG oslo_concurrency.lockutils [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.428s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.779020] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.638s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.779259] env[65503]: DEBUG nova.objects.instance [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Lazy-loading 'resources' on Instance uuid db942a2d-671b-4036-a80b-d2375145cd29 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 800.806028] env[65503]: INFO nova.scheduler.client.report [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Deleted allocations for instance ca5962fe-3e41-4fae-8860-90fa7278e0fc [ 800.936213] env[65503]: DEBUG nova.compute.manager [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 801.164960] env[65503]: DEBUG nova.compute.manager [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 801.165424] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 801.166396] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c86ab7b-277e-4ebe-b3df-d7b2b6826016 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.175867] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 801.176337] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aecf7f06-56e6-4926-92fb-a887c378956a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.184596] env[65503]: DEBUG oslo_vmware.api [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 801.184596] env[65503]: value = "task-4449959" [ 801.184596] env[65503]: _type = "Task" [ 801.184596] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.195719] env[65503]: DEBUG oslo_vmware.api [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449959, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.225183] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cb3a47-65e4-d98c-837f-5f5d927bf115, 'name': SearchDatastore_Task, 'duration_secs': 0.056588} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.225541] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.225824] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 801.226159] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.226270] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.226362] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 801.226680] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-379a8bd2-fdea-43d0-9497-7385befa5990 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.238866] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 801.239129] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 801.239890] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d8a166c-4802-4fa2-8d15-1c8fffc083c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.247019] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 801.247019] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5279c4f0-e17e-6956-e5bf-b44f93051830" [ 801.247019] env[65503]: _type = "Task" [ 801.247019] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.258015] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5279c4f0-e17e-6956-e5bf-b44f93051830, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.306702] env[65503]: DEBUG nova.virt.hardware [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 801.306950] env[65503]: DEBUG nova.virt.hardware [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 801.307777] env[65503]: DEBUG nova.virt.hardware [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 801.307777] env[65503]: DEBUG nova.virt.hardware [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 801.307777] env[65503]: DEBUG nova.virt.hardware [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image pref 
0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 801.307777] env[65503]: DEBUG nova.virt.hardware [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 801.308102] env[65503]: DEBUG nova.virt.hardware [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 801.308325] env[65503]: DEBUG nova.virt.hardware [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 801.308559] env[65503]: DEBUG nova.virt.hardware [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 801.308782] env[65503]: DEBUG nova.virt.hardware [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 801.309035] env[65503]: DEBUG nova.virt.hardware [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 801.310137] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7ccbf9-571b-4c20-b6e8-9b61f9f9c383 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.321857] env[65503]: DEBUG oslo_concurrency.lockutils [None req-080894ef-edc4-4c33-8c3e-56cf4f5f28aa tempest-ServerExternalEventsTest-882282563 tempest-ServerExternalEventsTest-882282563-project-member] Lock "ca5962fe-3e41-4fae-8860-90fa7278e0fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.201s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.328437] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286c59b0-9fd8-4cc8-be73-47514ee2d22e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.351587] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:2f:24:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e3dc22f-5c81-48e2-9afd-c567f7010a9b', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 801.361713] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 801.365775] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 801.366302] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e687b09-105c-413b-a1fe-b7c40a9f40b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.397488] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 801.397488] env[65503]: value = "task-4449960" [ 801.397488] env[65503]: _type = "Task" [ 801.397488] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.405561] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449960, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.446783] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Successfully updated port: cb078d1e-65b3-4dfd-980e-792978036eb5 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 801.461463] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.541788] env[65503]: WARNING openstack [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 801.542254] env[65503]: WARNING openstack [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 801.568866] env[65503]: WARNING openstack [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 
service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 801.569261] env[65503]: WARNING openstack [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 801.697252] env[65503]: DEBUG oslo_vmware.api [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449959, 'name': PowerOffVM_Task, 'duration_secs': 0.218644} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.697687] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 801.698116] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 801.698418] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1e92d13-e19a-44eb-b9de-c11df7f92cac {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.742821] env[65503]: WARNING neutronclient.v2_0.client [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
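The entries above trace the vmwareapi destroy path for instance 585a3d16-ee0c-4b71-9c0d-17b4bc968d09: PowerOffVM_Task is invoked, polled until it completes, and the VM is then unregistered before its datastore files are removed. Below is a minimal sketch of that invoke-and-poll pattern using the public oslo.vmware API; it is an illustration, not Nova's vm_util code, and the session construction and the vm_ref lookup are placeholder assumptions.

    # Sketch only: the invoke/poll pattern behind the "Invoking ...Task",
    # "Task: {...} progress is N%" and "completed successfully" lines above.
    from oslo_vmware import api as vmware_api

    def power_off_and_wait(session, vm_ref):
        # vm_ref is assumed to be a VirtualMachine managed object reference
        # obtained elsewhere (e.g. from a PropertyCollector query).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)  # blocks, polling until success or error

    # A session would be built roughly like this (placeholder host/credentials):
    # session = vmware_api.VMwareAPISession('vc.example.test', 'user', 'secret',
    #                                       api_retry_count=3,
    #                                       task_poll_interval=0.5)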
[ 801.743591] env[65503]: WARNING openstack [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 801.743962] env[65503]: WARNING openstack [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 801.765548] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 801.765829] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 801.766281] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Deleting the datastore file [datastore2] 585a3d16-ee0c-4b71-9c0d-17b4bc968d09 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 801.771551] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a0218aa-c929-4360-9cbf-5ffc7b7e0e1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.773814] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5279c4f0-e17e-6956-e5bf-b44f93051830, 'name': SearchDatastore_Task, 'duration_secs': 0.013963} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.778108] env[65503]: WARNING neutronclient.v2_0.client [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 801.778834] env[65503]: WARNING openstack [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 801.779575] env[65503]: WARNING openstack [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 801.792177] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b01970da-9716-4f3d-b8bb-846733e40b45 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.799350] env[65503]: DEBUG oslo_vmware.api [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 801.799350] env[65503]: value = "task-4449962" [ 801.799350] env[65503]: _type = "Task" [ 801.799350] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.802858] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 801.802858] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ffc90d-be1a-ca74-a43a-fdba04eb2d68" [ 801.802858] env[65503]: _type = "Task" [ 801.802858] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.822300] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ffc90d-be1a-ca74-a43a-fdba04eb2d68, 'name': SearchDatastore_Task, 'duration_secs': 0.016898} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.825702] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.826035] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 083797a8-8daf-493b-89de-7ae9137ed538/083797a8-8daf-493b-89de-7ae9137ed538.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 801.826595] env[65503]: DEBUG oslo_vmware.api [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449962, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.826829] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f551797b-e637-42a1-8d79-788f9316cbdf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.835752] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 801.835752] env[65503]: value = "task-4449963" [ 801.835752] env[65503]: _type = "Task" [ 801.835752] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.847695] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449963, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.907978] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449960, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.917796] env[65503]: DEBUG nova.network.neutron [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Updated VIF entry in instance network info cache for port b4d33353-d9f7-496e-9fb4-be39d02d940f. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 801.919058] env[65503]: DEBUG nova.network.neutron [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Updating instance_info_cache with network_info: [{"id": "b4d33353-d9f7-496e-9fb4-be39d02d940f", "address": "fa:16:3e:2e:c2:af", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4d33353-d9", "ovs_interfaceid": "b4d33353-d9f7-496e-9fb4-be39d02d940f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 801.952970] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "refresh_cache-ad8676f9-0433-49bf-bc72-e36fa010ff1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.953295] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "refresh_cache-ad8676f9-0433-49bf-bc72-e36fa010ff1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.953563] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 801.962427] env[65503]: DEBUG nova.network.neutron [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updated VIF entry in instance network info cache for port 0edc90ad-4b80-4fad-8456-06f696d9756a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 801.962950] env[65503]: DEBUG nova.network.neutron [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updating instance_info_cache with network_info: [{"id": "0edc90ad-4b80-4fad-8456-06f696d9756a", "address": "fa:16:3e:37:71:1c", "network": {"id": "9b0ccd37-b92a-4bb9-bd80-6ac034d3105c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2254fd86b74662975d3ad1fa4b0f74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0edc90ad-4b", "ovs_interfaceid": "0edc90ad-4b80-4fad-8456-06f696d9756a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 801.980825] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9305c5e8-27d3-45cf-b0ba-447784f2a137 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.990859] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671beb20-4f48-4f2e-9c40-b8c1ad79369b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.029294] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cc5b05-c9d1-47f7-82f3-2e5670a007e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.038756] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b92e9bb-07e9-4548-901f-2d7995c2f4bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.054319] env[65503]: DEBUG nova.compute.provider_tree [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.314059] env[65503]: DEBUG oslo_vmware.api [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4449962, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225328} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.314354] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 802.314583] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 802.314797] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 802.314916] env[65503]: INFO nova.compute.manager [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Took 1.15 seconds to destroy the instance on the hypervisor. [ 802.315275] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 802.315525] env[65503]: DEBUG nova.compute.manager [-] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 802.315899] env[65503]: DEBUG nova.network.neutron [-] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 802.315899] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
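The recurring "Acquiring lock ... / ... acquired ... waited Ns / ... released ... held Ns" triples in these entries (around "compute_resources", the image-cache VMDK path, and the per-instance refresh_cache locks) come from oslo.concurrency's lockutils. A hedged sketch of that usage pattern follows; the lock names are taken from the log, while the function bodies are placeholders rather than Nova's actual resource-tracker code.

    # Sketch only: the lockutils pattern that produces the acquire/release
    # debug lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # critical section: per-node resource bookkeeping would happen here
        pass

    def process_cached_image(cache_vmdk_path):
        # The same mechanism as a context manager, e.g. serializing work on
        # a single devstack-image-cache_base entry.
        with lockutils.lock(cache_vmdk_path):
            pass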
[ 802.316434] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 802.316693] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 802.338712] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquiring lock "31ee1061-6199-4341-86ab-9ae606b269fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.338960] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Lock "31ee1061-6199-4341-86ab-9ae606b269fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.353938] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449963, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.413608] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449960, 'name': CreateVM_Task, 'duration_secs': 0.555538} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.414323] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 802.415131] env[65503]: WARNING neutronclient.v2_0.client [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 802.417987] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.417987] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.418136] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 802.419068] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8d1f87f-e79e-450c-9330-62f03a1b07a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.421614] env[65503]: DEBUG oslo_concurrency.lockutils [req-8329b0e7-7927-4508-ae93-57e7e8541dde req-0a19d76f-2883-443f-acde-bd4638403aaa service nova] Releasing lock "refresh_cache-a57486e1-82e3-48d5-99fe-c89b300a2136" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.425770] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 802.425770] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52027ee9-59f4-980a-cdc2-7f75ef31e2fa" [ 802.425770] env[65503]: _type = "Task" [ 802.425770] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.437449] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52027ee9-59f4-980a-cdc2-7f75ef31e2fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.457066] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 802.457849] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 802.473385] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 802.479552] env[65503]: DEBUG oslo_concurrency.lockutils [req-129e8927-4560-404b-b855-836488917ce1 req-62013ea2-ee35-4fd7-81b2-44b34849ebc3 service nova] Releasing lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.518599] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 802.560763] env[65503]: DEBUG nova.scheduler.client.report [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 802.776257] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 802.776743] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 802.849081] env[65503]: DEBUG nova.compute.manager [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 802.858063] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658789} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.858556] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 083797a8-8daf-493b-89de-7ae9137ed538/083797a8-8daf-493b-89de-7ae9137ed538.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 802.858932] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 802.859582] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ff1ebaf2-a4a5-496e-8992-559837eb9f8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.863811] env[65503]: WARNING neutronclient.v2_0.client [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 802.865933] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 802.866648] env[65503]: WARNING openstack [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 802.887608] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 802.887608] env[65503]: value = "task-4449964" [ 802.887608] env[65503]: _type = "Task" [ 802.887608] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.903567] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449964, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.942760] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52027ee9-59f4-980a-cdc2-7f75ef31e2fa, 'name': SearchDatastore_Task, 'duration_secs': 0.033575} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.943262] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.943617] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 802.943914] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.944101] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.944377] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 802.944709] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99413a0a-7698-4cda-9816-f9327faeffbe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.958777] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 802.958841] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 802.960409] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22ae0715-4009-4d21-95cd-7452e5ad2203 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.971699] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 802.971699] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ea2631-8472-a0f9-d227-03cde5b669e7" [ 802.971699] env[65503]: _type = "Task" [ 802.971699] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.984791] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ea2631-8472-a0f9-d227-03cde5b669e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.020044] env[65503]: DEBUG nova.network.neutron [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Updating instance_info_cache with network_info: [{"id": "cb078d1e-65b3-4dfd-980e-792978036eb5", "address": "fa:16:3e:1f:ca:c4", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb078d1e-65", "ovs_interfaceid": "cb078d1e-65b3-4dfd-980e-792978036eb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 803.067905] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.289s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.075020] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.278s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.075020] env[65503]: INFO nova.compute.claims [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.116547] env[65503]: INFO nova.scheduler.client.report [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Deleted allocations for instance db942a2d-671b-4036-a80b-d2375145cd29 [ 803.141595] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquiring lock "52701da5-2908-40f8-b1c5-bc30f17d51a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.141595] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Lock "52701da5-2908-40f8-b1c5-bc30f17d51a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.263588] env[65503]: DEBUG nova.network.neutron [-] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 803.374191] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.400429] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071412} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.400629] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 803.401553] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-accde5aa-d809-4148-a14d-ad938d1ff177 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.429203] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 083797a8-8daf-493b-89de-7ae9137ed538/083797a8-8daf-493b-89de-7ae9137ed538.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 803.429590] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6edb81a5-94a6-41df-856b-48ddcebc3cc5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.452480] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 803.452480] env[65503]: value = "task-4449965" [ 803.452480] env[65503]: _type = "Task" [ 803.452480] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.462025] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449965, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.484613] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ea2631-8472-a0f9-d227-03cde5b669e7, 'name': SearchDatastore_Task, 'duration_secs': 0.045198} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.488441] env[65503]: DEBUG nova.compute.manager [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Received event network-vif-deleted-f1edfbc1-d9c4-460c-90ae-43d46d3fe56a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 803.488657] env[65503]: DEBUG nova.compute.manager [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Received event network-changed-0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 803.488934] env[65503]: DEBUG nova.compute.manager [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Refreshing instance network info cache due to event network-changed-0edc90ad-4b80-4fad-8456-06f696d9756a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 803.489010] env[65503]: DEBUG oslo_concurrency.lockutils [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Acquiring lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.489149] env[65503]: DEBUG oslo_concurrency.lockutils [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Acquired lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.489287] env[65503]: DEBUG nova.network.neutron [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Refreshing network info cache for port 0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 803.493683] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec99c4d2-a4bb-4f63-95c9-ac9d801753a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.499377] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 803.499377] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52248f7d-8e09-2167-6065-009ca3f9485b" [ 803.499377] env[65503]: _type = "Task" [ 803.499377] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.512175] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52248f7d-8e09-2167-6065-009ca3f9485b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.515751] env[65503]: DEBUG nova.compute.manager [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Received event network-vif-plugged-f956ccee-3465-49d9-8608-0bb9d01afbe6 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 803.515751] env[65503]: DEBUG oslo_concurrency.lockutils [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Acquiring lock "083797a8-8daf-493b-89de-7ae9137ed538-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.515751] env[65503]: DEBUG oslo_concurrency.lockutils [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Lock "083797a8-8daf-493b-89de-7ae9137ed538-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.515751] env[65503]: DEBUG oslo_concurrency.lockutils [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Lock "083797a8-8daf-493b-89de-7ae9137ed538-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.515751] env[65503]: DEBUG nova.compute.manager [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] No waiting events found dispatching network-vif-plugged-f956ccee-3465-49d9-8608-0bb9d01afbe6 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 803.515751] env[65503]: WARNING nova.compute.manager [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Received unexpected event network-vif-plugged-f956ccee-3465-49d9-8608-0bb9d01afbe6 for instance with vm_state building and task_state spawning. [ 803.516255] env[65503]: DEBUG nova.compute.manager [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Received event network-changed-f956ccee-3465-49d9-8608-0bb9d01afbe6 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 803.518298] env[65503]: DEBUG nova.compute.manager [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Refreshing instance network info cache due to event network-changed-f956ccee-3465-49d9-8608-0bb9d01afbe6. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 803.518569] env[65503]: DEBUG oslo_concurrency.lockutils [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Acquiring lock "refresh_cache-083797a8-8daf-493b-89de-7ae9137ed538" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.518706] env[65503]: DEBUG oslo_concurrency.lockutils [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Acquired lock "refresh_cache-083797a8-8daf-493b-89de-7ae9137ed538" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.518862] env[65503]: DEBUG nova.network.neutron [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Refreshing network info cache for port f956ccee-3465-49d9-8608-0bb9d01afbe6 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 803.523414] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "refresh_cache-ad8676f9-0433-49bf-bc72-e36fa010ff1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.524174] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Instance network_info: |[{"id": "cb078d1e-65b3-4dfd-980e-792978036eb5", "address": "fa:16:3e:1f:ca:c4", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb078d1e-65", "ovs_interfaceid": "cb078d1e-65b3-4dfd-980e-792978036eb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 803.525556] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:ca:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd891a662-7da0-4be5-8d0d-01b4ff70552a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'cb078d1e-65b3-4dfd-980e-792978036eb5', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.535730] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 803.538293] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 803.538469] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74f05efd-2ec6-4453-8d0d-90c745e37f17 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.561616] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.561616] env[65503]: value = "task-4449966" [ 803.561616] env[65503]: _type = "Task" [ 803.561616] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.573932] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449966, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.630161] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fb92db92-9d7e-4411-aad8-482d47c79120 tempest-VolumesAssistedSnapshotsTest-637759821 tempest-VolumesAssistedSnapshotsTest-637759821-project-member] Lock "db942a2d-671b-4036-a80b-d2375145cd29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.831s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.693962] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.769782] env[65503]: INFO nova.compute.manager [-] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Took 1.45 seconds to deallocate network for instance. [ 803.964937] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449965, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.993239] env[65503]: WARNING neutronclient.v2_0.client [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 803.993991] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 803.994361] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 804.013544] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52248f7d-8e09-2167-6065-009ca3f9485b, 'name': SearchDatastore_Task, 'duration_secs': 0.014009} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.013719] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.013987] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] a22f589e-7c40-4023-9a4c-9ab2a76faa94/a22f589e-7c40-4023-9a4c-9ab2a76faa94.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 804.014297] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5806b226-f9ee-4f5e-b3f7-467d13fb2613 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.024272] env[65503]: WARNING neutronclient.v2_0.client [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 804.024778] env[65503]: WARNING openstack [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 804.025204] env[65503]: WARNING openstack [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 804.033419] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 804.033419] env[65503]: value = "task-4449967" [ 804.033419] env[65503]: _type = "Task" [ 804.033419] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.044294] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449967, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.073207] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449966, 'name': CreateVM_Task, 'duration_secs': 0.446318} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.073207] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 804.073207] env[65503]: WARNING neutronclient.v2_0.client [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 804.073207] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.073474] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.073587] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 804.073819] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a9daad2-8a36-440d-bb3e-786f26ba37c7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.080125] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 804.080125] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]523dc1e4-c2ec-e5a7-8f61-4aab0f9b1ac0" [ 804.080125] env[65503]: _type = "Task" [ 804.080125] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.097088] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523dc1e4-c2ec-e5a7-8f61-4aab0f9b1ac0, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.097536] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.097856] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.098177] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.098378] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.098610] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 804.098947] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f06b6a9-ed2a-4470-af14-96a14f7819b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.109335] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 804.109538] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 804.110434] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63feae0d-8412-4a24-a98a-d0931d917246 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.125491] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 804.125491] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5243e21d-adf6-f44b-bb52-2bd8543b2bed" [ 804.125491] env[65503]: _type = "Task" [ 804.125491] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.145672] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5243e21d-adf6-f44b-bb52-2bd8543b2bed, 'name': SearchDatastore_Task, 'duration_secs': 0.010187} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.146750] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-249da1c4-b7ed-40bd-a697-8aa4abdcc722 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.155107] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 804.155107] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]522c2506-1a96-9dd3-6d18-7a4715f15e49" [ 804.155107] env[65503]: _type = "Task" [ 804.155107] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.169509] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522c2506-1a96-9dd3-6d18-7a4715f15e49, 'name': SearchDatastore_Task, 'duration_secs': 0.011967} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.169786] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.170051] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] ad8676f9-0433-49bf-bc72-e36fa010ff1d/ad8676f9-0433-49bf-bc72-e36fa010ff1d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 804.170326] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c649c2a2-5af6-4984-a6b8-23e4b61aae37 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.180238] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 804.180238] env[65503]: value = "task-4449968" [ 804.180238] env[65503]: _type = "Task" [ 804.180238] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.190569] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449968, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.284696] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.383804] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 804.383804] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 804.408341] env[65503]: WARNING openstack [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 804.408603] env[65503]: WARNING openstack [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 804.466215] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449965, 'name': ReconfigVM_Task, 'duration_secs': 0.783494} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.466563] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 083797a8-8daf-493b-89de-7ae9137ed538/083797a8-8daf-493b-89de-7ae9137ed538.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 804.469997] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9728c48f-5410-408b-9e7b-cedcbc6a6ed0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.485164] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 804.485164] env[65503]: value = "task-4449969" [ 804.485164] env[65503]: _type = "Task" [ 804.485164] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.499036] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449969, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.549558] env[65503]: WARNING neutronclient.v2_0.client [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 804.550291] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 804.550687] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 804.563946] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449967, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.581743] env[65503]: WARNING neutronclient.v2_0.client [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 804.583403] env[65503]: WARNING openstack [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 804.583403] env[65503]: WARNING openstack [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 804.711683] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449968, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.748475] env[65503]: DEBUG nova.network.neutron [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updated VIF entry in instance network info cache for port 0edc90ad-4b80-4fad-8456-06f696d9756a. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 804.748917] env[65503]: DEBUG nova.network.neutron [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updating instance_info_cache with network_info: [{"id": "0edc90ad-4b80-4fad-8456-06f696d9756a", "address": "fa:16:3e:37:71:1c", "network": {"id": "9b0ccd37-b92a-4bb9-bd80-6ac034d3105c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1077241899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2254fd86b74662975d3ad1fa4b0f74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0edc90ad-4b", "ovs_interfaceid": "0edc90ad-4b80-4fad-8456-06f696d9756a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 804.791111] env[65503]: DEBUG nova.network.neutron [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Updated VIF entry in instance network info cache for port f956ccee-3465-49d9-8608-0bb9d01afbe6. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 804.791111] env[65503]: DEBUG nova.network.neutron [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Updating instance_info_cache with network_info: [{"id": "f956ccee-3465-49d9-8608-0bb9d01afbe6", "address": "fa:16:3e:9e:89:52", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf956ccee-34", "ovs_interfaceid": "f956ccee-3465-49d9-8608-0bb9d01afbe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 804.893344] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788894fd-edd5-4e43-9360-a7273fb92c04 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.903634] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5482334-5527-45bb-9688-aebfd7c3ae3a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.937230] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcefc1e6-a663-4253-adef-d3f149d32b42 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.947381] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5041fb42-d825-4697-893d-c3c362d027ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.964911] env[65503]: DEBUG nova.compute.provider_tree [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.982385] env[65503]: WARNING neutronclient.v2_0.client [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 805.003420] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449969, 'name': Rename_Task, 'duration_secs': 0.194254} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.005531] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 805.006301] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11f94d73-2b5c-4596-b8fe-fd9c0104a829 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.016017] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 805.016017] env[65503]: value = "task-4449970" [ 805.016017] env[65503]: _type = "Task" [ 805.016017] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.027720] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449970, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.045862] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449967, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56038} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.046250] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] a22f589e-7c40-4023-9a4c-9ab2a76faa94/a22f589e-7c40-4023-9a4c-9ab2a76faa94.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 805.046503] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 805.046777] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-910f7194-d3b7-4a79-84a8-59ebebd9760f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.055667] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 805.055667] env[65503]: value = "task-4449971" [ 805.055667] env[65503]: _type = "Task" [ 805.055667] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.068358] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449971, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.161905] env[65503]: WARNING neutronclient.v2_0.client [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 805.161905] env[65503]: WARNING neutronclient.v2_0.client [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 805.200029] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449968, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.830571} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.200029] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] ad8676f9-0433-49bf-bc72-e36fa010ff1d/ad8676f9-0433-49bf-bc72-e36fa010ff1d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 805.200029] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 805.200029] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5cc2102-44a6-4f4b-ba90-3ca040efff2b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.204171] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "interface-a197b590-1f74-4241-9579-2f2d3bb89a1d-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.204485] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-a197b590-1f74-4241-9579-2f2d3bb89a1d-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.204993] env[65503]: DEBUG nova.objects.instance [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'flavor' on Instance uuid a197b590-1f74-4241-9579-2f2d3bb89a1d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.212126] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 805.212126] env[65503]: value = "task-4449972" [ 805.212126] env[65503]: _type = "Task" [ 805.212126] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.224914] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449972, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.238256] env[65503]: DEBUG nova.network.neutron [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Port 633bd812-c51f-4ae0-bab2-ced08b56a04b binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 805.238590] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.238755] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.239027] env[65503]: DEBUG nova.network.neutron [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 805.257180] env[65503]: DEBUG oslo_concurrency.lockutils [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Releasing lock "refresh_cache-45a4b511-aa6a-433d-b136-f53686db9575" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.257508] env[65503]: DEBUG nova.compute.manager [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Received event network-vif-plugged-cb078d1e-65b3-4dfd-980e-792978036eb5 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 805.257633] env[65503]: DEBUG oslo_concurrency.lockutils [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Acquiring lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.258085] env[65503]: DEBUG oslo_concurrency.lockutils [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.258085] env[65503]: DEBUG oslo_concurrency.lockutils [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.258300] env[65503]: 
DEBUG nova.compute.manager [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] No waiting events found dispatching network-vif-plugged-cb078d1e-65b3-4dfd-980e-792978036eb5 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 805.258397] env[65503]: WARNING nova.compute.manager [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Received unexpected event network-vif-plugged-cb078d1e-65b3-4dfd-980e-792978036eb5 for instance with vm_state building and task_state spawning. [ 805.258552] env[65503]: DEBUG nova.compute.manager [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Received event network-changed-cb078d1e-65b3-4dfd-980e-792978036eb5 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 805.258692] env[65503]: DEBUG nova.compute.manager [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Refreshing instance network info cache due to event network-changed-cb078d1e-65b3-4dfd-980e-792978036eb5. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 805.258863] env[65503]: DEBUG oslo_concurrency.lockutils [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Acquiring lock "refresh_cache-ad8676f9-0433-49bf-bc72-e36fa010ff1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.259024] env[65503]: DEBUG oslo_concurrency.lockutils [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Acquired lock "refresh_cache-ad8676f9-0433-49bf-bc72-e36fa010ff1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.259171] env[65503]: DEBUG nova.network.neutron [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Refreshing network info cache for port cb078d1e-65b3-4dfd-980e-792978036eb5 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 805.301448] env[65503]: DEBUG oslo_concurrency.lockutils [req-5812339e-06ec-4ba7-9c67-87e0cea62066 req-a511cc59-0f57-4b84-b9be-52ce43ec07ed service nova] Releasing lock "refresh_cache-083797a8-8daf-493b-89de-7ae9137ed538" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.419126] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquiring lock "429b7542-c288-4a7a-9032-09881938b256" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.419367] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Lock "429b7542-c288-4a7a-9032-09881938b256" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.468664] env[65503]: DEBUG nova.scheduler.client.report [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 805.530326] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449970, 'name': PowerOnVM_Task} progress is 96%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.571726] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449971, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079006} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.572238] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 805.574297] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93062ceb-49d3-47c6-b496-4685feedcb41 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.618206] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] a22f589e-7c40-4023-9a4c-9ab2a76faa94/a22f589e-7c40-4023-9a4c-9ab2a76faa94.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 805.618781] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be352ad8-138a-401a-b875-7c86568836b0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.643665] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 805.643665] env[65503]: value = "task-4449973" [ 805.643665] env[65503]: _type = "Task" [ 805.643665] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.660736] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449973, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.714615] env[65503]: WARNING neutronclient.v2_0.client [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 805.715744] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 805.716214] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 805.737682] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449972, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.161125} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.737916] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 805.738794] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7b8347-6368-4551-b823-a54c538f5dc9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.744801] env[65503]: WARNING neutronclient.v2_0.client [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 805.745516] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 805.745864] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 805.774335] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] ad8676f9-0433-49bf-bc72-e36fa010ff1d/ad8676f9-0433-49bf-bc72-e36fa010ff1d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 805.779564] env[65503]: WARNING neutronclient.v2_0.client [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 805.780171] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 805.780510] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 805.789719] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9e20a9f-5247-4422-a6eb-d5ecb903919f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.822034] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 805.822034] env[65503]: value = "task-4449974" [ 805.822034] env[65503]: _type = "Task" [ 805.822034] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.833205] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449974, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.884545] env[65503]: DEBUG nova.objects.instance [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'pci_requests' on Instance uuid a197b590-1f74-4241-9579-2f2d3bb89a1d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.975615] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.905s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.978743] env[65503]: DEBUG nova.compute.manager [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 805.979446] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.650s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.979634] env[65503]: DEBUG nova.objects.instance [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lazy-loading 'resources' on Instance uuid f8d61ded-ddf7-4ec9-88e7-92ffb6934733 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 806.018939] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 806.018939] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 806.032684] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 
'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 806.033093] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 806.046731] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449970, 'name': PowerOnVM_Task, 'duration_secs': 0.562448} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.047030] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 806.047262] env[65503]: INFO nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Took 10.66 seconds to spawn the instance on the hypervisor. [ 806.047527] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 806.048408] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542558a4-09fd-47f7-bc9f-9a0f59976541 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.137293] env[65503]: WARNING neutronclient.v2_0.client [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 806.138591] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 806.139070] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 806.160107] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449973, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.164937] env[65503]: WARNING neutronclient.v2_0.client [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 806.165805] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 806.166235] env[65503]: WARNING openstack [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 806.331790] env[65503]: DEBUG nova.network.neutron [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance_info_cache with network_info: [{"id": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "address": "fa:16:3e:b6:69:8c", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", 
"segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633bd812-c5", "ovs_interfaceid": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 806.337897] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449974, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.390157] env[65503]: DEBUG nova.objects.base [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 806.390157] env[65503]: DEBUG nova.network.neutron [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 806.390157] env[65503]: WARNING neutronclient.v2_0.client [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 806.390386] env[65503]: WARNING neutronclient.v2_0.client [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 806.391106] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 806.391458] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 806.426787] env[65503]: DEBUG nova.network.neutron [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Updated VIF entry in instance network info cache for port cb078d1e-65b3-4dfd-980e-792978036eb5. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 806.427259] env[65503]: DEBUG nova.network.neutron [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Updating instance_info_cache with network_info: [{"id": "cb078d1e-65b3-4dfd-980e-792978036eb5", "address": "fa:16:3e:1f:ca:c4", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb078d1e-65", "ovs_interfaceid": "cb078d1e-65b3-4dfd-980e-792978036eb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 806.486251] env[65503]: DEBUG nova.compute.utils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 806.488515] env[65503]: DEBUG nova.compute.manager [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Not allocating networking since 'none' was specified. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 806.499154] env[65503]: DEBUG nova.policy [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b617c5c7508403f9bef0d6b436f541d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be67f50c5bc447309d4c04f3f2805455', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 806.574377] env[65503]: INFO nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Took 41.88 seconds to build instance. 
[ 806.666823] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449973, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.835937] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449974, 'name': ReconfigVM_Task, 'duration_secs': 0.89253} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.837104] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Reconfigured VM instance instance-00000036 to attach disk [datastore1] ad8676f9-0433-49bf-bc72-e36fa010ff1d/ad8676f9-0433-49bf-bc72-e36fa010ff1d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 806.838496] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9b6d0772-0b25-4b29-a613-e7874dac0447 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.843724] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.854660] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 806.854660] env[65503]: value = "task-4449975" [ 806.854660] env[65503]: _type = "Task" [ 806.854660] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.857577] env[65503]: DEBUG nova.network.neutron [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Successfully created port: 859e5cfc-c759-44b5-9f3e-fc7a2532d1aa {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 806.876664] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449975, 'name': Rename_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.934628] env[65503]: DEBUG oslo_concurrency.lockutils [req-26a88f71-12db-4571-88e4-652f739f8f7a req-a575871e-6c6a-4251-9fcf-875d6b569c98 service nova] Releasing lock "refresh_cache-ad8676f9-0433-49bf-bc72-e36fa010ff1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.990404] env[65503]: DEBUG nova.compute.manager [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 807.078790] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "083797a8-8daf-493b-89de-7ae9137ed538" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.053s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.152898] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d6821c-7030-4a00-937f-f94355024aec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.172382] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4832b761-e60e-42fa-a693-0bf4c78904cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.176790] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449973, 'name': ReconfigVM_Task, 'duration_secs': 1.03714} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.177649] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Reconfigured VM instance instance-00000030 to attach disk [datastore1] a22f589e-7c40-4023-9a4c-9ab2a76faa94/a22f589e-7c40-4023-9a4c-9ab2a76faa94.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 807.178525] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d26051cc-e59c-466d-bab8-fd0272e2ff27 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.207904] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1101b33b-588d-4517-805a-19db3b8ac643 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.211908] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 807.211908] env[65503]: value = "task-4449976" [ 807.211908] env[65503]: _type = "Task" [ 807.211908] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.219832] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37e8db2-e680-4fd6-bb99-072066a2e2d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.230189] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449976, 'name': Rename_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.241265] env[65503]: DEBUG nova.compute.provider_tree [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.347186] env[65503]: DEBUG nova.compute.manager [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=65503) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:925}} [ 807.347411] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.372703] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449975, 'name': Rename_Task, 'duration_secs': 0.142612} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.372703] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 807.372703] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4f85d48-c563-4c45-b2ca-acc84254bfc1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.381382] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 807.381382] env[65503]: value = "task-4449977" [ 807.381382] env[65503]: _type = "Task" [ 807.381382] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.390034] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449977, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.585437] env[65503]: DEBUG nova.compute.manager [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 807.605969] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.605969] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.605969] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.605969] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.605969] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.608232] env[65503]: INFO nova.compute.manager [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Terminating instance [ 807.722748] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449976, 'name': Rename_Task, 'duration_secs': 0.145679} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.723477] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 807.723849] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-229c6242-0d36-436d-87c3-3b257e6a721b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.731737] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 807.731737] env[65503]: value = "task-4449978" [ 807.731737] env[65503]: _type = "Task" [ 807.731737] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.742539] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449978, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.745817] env[65503]: DEBUG nova.scheduler.client.report [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 807.893599] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449977, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.002230] env[65503]: DEBUG nova.compute.manager [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 808.034148] env[65503]: DEBUG nova.virt.hardware [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 808.034472] env[65503]: DEBUG nova.virt.hardware [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 808.034836] env[65503]: DEBUG nova.virt.hardware [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 808.035084] env[65503]: DEBUG nova.virt.hardware [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 808.035273] env[65503]: DEBUG nova.virt.hardware [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 808.035455] env[65503]: DEBUG nova.virt.hardware [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 808.035701] env[65503]: DEBUG nova.virt.hardware [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 808.035878] env[65503]: DEBUG nova.virt.hardware [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 808.036108] env[65503]: DEBUG nova.virt.hardware [None req-e41542d7-37d7-4a31-a496-e83f059239c9 
tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 808.036309] env[65503]: DEBUG nova.virt.hardware [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 808.036516] env[65503]: DEBUG nova.virt.hardware [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 808.037786] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1caf2d1-b33b-4cce-bdc1-e9a33ec53210 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.048202] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275abb6d-49cd-4348-a480-5aa82606fe9f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.069424] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.079316] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Creating folder: Project (93aaf94058114928a702e72dee236801). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.079787] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-740f6738-b27c-40c3-8563-f8cd6d87ff32 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.092848] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Created folder: Project (93aaf94058114928a702e72dee236801) in parent group-v870190. [ 808.093062] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Creating folder: Instances. Parent ref: group-v870350. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.093327] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-393817ca-e348-4717-802a-043e5a850223 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.109261] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Created folder: Instances in parent group-v870350. 
[ 808.109546] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 808.109765] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 808.109995] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ed50494-0802-4225-9021-5a8ffda98a50 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.127955] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.128770] env[65503]: DEBUG nova.compute.manager [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 808.129037] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 808.130055] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a53edc-db9f-4463-8956-a92b95a3846f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.140361] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 808.142716] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04b0aa76-f4be-456a-979d-82ff8c392b34 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.146674] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.146674] env[65503]: value = "task-4449981" [ 808.146674] env[65503]: _type = "Task" [ 808.146674] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.157268] env[65503]: DEBUG oslo_vmware.api [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 808.157268] env[65503]: value = "task-4449982" [ 808.157268] env[65503]: _type = "Task" [ 808.157268] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.170028] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449981, 'name': CreateVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.174658] env[65503]: DEBUG oslo_vmware.api [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449982, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.244604] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449978, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.253533] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.273s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.256037] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.725s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.256292] env[65503]: DEBUG nova.objects.instance [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 808.286040] env[65503]: INFO nova.scheduler.client.report [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Deleted allocations for instance f8d61ded-ddf7-4ec9-88e7-92ffb6934733 [ 808.401682] env[65503]: DEBUG oslo_vmware.api [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449977, 'name': PowerOnVM_Task, 'duration_secs': 0.942001} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.402601] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 808.402601] env[65503]: INFO nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Took 8.00 seconds to spawn the instance on the hypervisor. [ 808.402601] env[65503]: DEBUG nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 808.403438] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa08b5b-4e2f-48f7-afd4-6ca08066993c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.425378] env[65503]: DEBUG nova.compute.manager [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Received event network-vif-deleted-2a598d20-dc23-42a4-9d99-962df4c4d391 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 808.425786] env[65503]: DEBUG nova.compute.manager [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Received event network-changed-b4d33353-d9f7-496e-9fb4-be39d02d940f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 808.426074] env[65503]: DEBUG nova.compute.manager [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Refreshing instance network info cache due to event network-changed-b4d33353-d9f7-496e-9fb4-be39d02d940f. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 808.427672] env[65503]: DEBUG oslo_concurrency.lockutils [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] Acquiring lock "refresh_cache-a57486e1-82e3-48d5-99fe-c89b300a2136" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.427835] env[65503]: DEBUG oslo_concurrency.lockutils [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] Acquired lock "refresh_cache-a57486e1-82e3-48d5-99fe-c89b300a2136" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.428170] env[65503]: DEBUG nova.network.neutron [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Refreshing network info cache for port b4d33353-d9f7-496e-9fb4-be39d02d940f {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 808.560111] env[65503]: DEBUG nova.network.neutron [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Successfully updated port: 859e5cfc-c759-44b5-9f3e-fc7a2532d1aa {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 808.659631] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4449981, 'name': CreateVM_Task, 'duration_secs': 0.374693} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.662917] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 808.663451] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.663610] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.663922] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 808.664611] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50cbcad3-e1ac-4700-9c93-84e30188838f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.670149] env[65503]: DEBUG oslo_vmware.api [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 
tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449982, 'name': PowerOffVM_Task, 'duration_secs': 0.287316} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.671804] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 808.672058] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 808.672265] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 808.672265] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fc57b1-2e13-bdc8-1375-957ed735b8d1" [ 808.672265] env[65503]: _type = "Task" [ 808.672265] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.672495] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb329e6b-45a7-4bf1-a9cf-30d9fa3d0f8e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.685665] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fc57b1-2e13-bdc8-1375-957ed735b8d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.743907] env[65503]: DEBUG oslo_vmware.api [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449978, 'name': PowerOnVM_Task, 'duration_secs': 0.891752} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.744210] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 808.744419] env[65503]: DEBUG nova.compute.manager [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 808.745439] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2ed1a9-ccc5-4f04-9972-7f7dd14578b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.752075] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 808.752075] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 808.752075] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleting the datastore file [datastore2] 606b8e9f-67c0-4d5c-85ab-ca35f8b31977 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 808.754557] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ad485b9-d760-48d8-be2e-73523aa1e358 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.768336] env[65503]: DEBUG oslo_vmware.api [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 808.768336] env[65503]: value = "task-4449984" [ 808.768336] env[65503]: _type = "Task" [ 808.768336] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.779579] env[65503]: DEBUG oslo_vmware.api [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449984, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.794533] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b7e70d75-1ad5-4655-ab66-9752745e08f4 tempest-ServersTestMultiNic-148125074 tempest-ServersTestMultiNic-148125074-project-member] Lock "f8d61ded-ddf7-4ec9-88e7-92ffb6934733" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 39.918s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.925925] env[65503]: INFO nova.compute.manager [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Took 42.94 seconds to build instance. [ 808.933091] env[65503]: WARNING neutronclient.v2_0.client [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 808.933621] env[65503]: WARNING openstack [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 808.933960] env[65503]: WARNING openstack [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 809.064345] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.064345] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.064345] env[65503]: DEBUG nova.network.neutron [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 809.123297] env[65503]: WARNING openstack [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 809.123717] 
env[65503]: WARNING openstack [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 809.187466] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fc57b1-2e13-bdc8-1375-957ed735b8d1, 'name': SearchDatastore_Task, 'duration_secs': 0.028522} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.187859] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.188170] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.188469] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.188581] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.188783] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.189129] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4e66279-98bc-4d34-8054-6767c8a3b29c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.203349] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.203606] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 809.204626] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74fcb094-3edd-43dd-b14a-4c704361d4a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.211178] env[65503]: WARNING neutronclient.v2_0.client [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 809.212623] env[65503]: WARNING openstack [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 809.213137] env[65503]: WARNING openstack [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 809.226335] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 809.226335] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524c9eea-43d0-2a58-613b-cccbeb309c5a" [ 809.226335] env[65503]: _type = "Task" [ 809.226335] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.240578] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524c9eea-43d0-2a58-613b-cccbeb309c5a, 'name': SearchDatastore_Task, 'duration_secs': 0.015835} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.241515] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39a05bcb-65c6-4ab2-802f-606e8b4928ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.249226] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 809.249226] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52718701-3d18-a691-3438-8faddfaf8bee" [ 809.249226] env[65503]: _type = "Task" [ 809.249226] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.264485] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52718701-3d18-a691-3438-8faddfaf8bee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.267163] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.270878] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ec62ed1-3bcf-42b2-8822-00a39cf10e87 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.272236] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.526s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.272236] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.272388] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 809.273048] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.014s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.273048] env[65503]: DEBUG nova.objects.instance [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Lazy-loading 'resources' on Instance uuid 5cefb589-9947-4fc1-89b4-d888f8c8f644 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 809.278573] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7008adfb-0aaa-4329-8ddf-49c8bca167a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.293596] env[65503]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207cb54c-5917-4846-a280-d94c16d1f38f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.298898] env[65503]: DEBUG oslo_vmware.api [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4449984, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374429} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.300287] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 809.300527] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 809.301846] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 809.301846] env[65503]: INFO nova.compute.manager [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Took 1.17 seconds to destroy the instance on the hypervisor. [ 809.301846] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 809.303062] env[65503]: DEBUG nova.compute.manager [-] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 809.303349] env[65503]: DEBUG nova.network.neutron [-] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 809.303780] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 809.304466] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 809.304803] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 809.326590] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69d615b-5f79-414e-bfb1-30c94b162c39 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.334237] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15428647-94a6-406e-8bc0-79d5880c12c7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.368432] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178216MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 809.368579] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.400223] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 809.428687] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3dda3d35-49c5-4cb9-ba0a-221f6ce8eee6 tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 47.362s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.445586] env[65503]: DEBUG nova.network.neutron [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Updated VIF entry in instance network info cache for port b4d33353-d9f7-496e-9fb4-be39d02d940f. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 809.446031] env[65503]: DEBUG nova.network.neutron [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Updating instance_info_cache with network_info: [{"id": "b4d33353-d9f7-496e-9fb4-be39d02d940f", "address": "fa:16:3e:2e:c2:af", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4d33353-d9", "ovs_interfaceid": "b4d33353-d9f7-496e-9fb4-be39d02d940f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 809.567975] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 809.568410] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 809.591884] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.592133] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.746324] env[65503]: WARNING nova.network.neutron [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] d2c28c8e-55de-416b-97e1-c5ea06e7f107 already exists in list: networks containing: ['d2c28c8e-55de-416b-97e1-c5ea06e7f107']. ignoring it [ 809.761787] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52718701-3d18-a691-3438-8faddfaf8bee, 'name': SearchDatastore_Task, 'duration_secs': 0.014663} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.762462] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.762578] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] e74fe378-737a-4732-9a2d-b889a436b8a3/e74fe378-737a-4732-9a2d-b889a436b8a3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 809.762876] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52ad7a86-bf1d-4e20-a239-a2b1307e63b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.770897] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 809.770897] env[65503]: value = "task-4449985" [ 809.770897] env[65503]: _type = "Task" [ 809.770897] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.785962] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449985, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.796727] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 809.797209] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 809.886252] env[65503]: WARNING neutronclient.v2_0.client [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 809.886867] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 809.887831] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 809.932542] env[65503]: DEBUG nova.compute.manager [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 809.949718] env[65503]: DEBUG oslo_concurrency.lockutils [req-983ceadd-776c-45b2-b4c0-e88fc98c5346 req-1b8427b6-411f-4182-a90b-71a7723e7d7f service nova] Releasing lock "refresh_cache-a57486e1-82e3-48d5-99fe-c89b300a2136" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.001675] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquiring lock "0001f4db-3073-411c-8d60-6d8528ef263a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.001906] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "0001f4db-3073-411c-8d60-6d8528ef263a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.198107] env[65503]: DEBUG nova.network.neutron [-] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 810.290053] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449985, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.340027] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 810.340548] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 810.459723] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.521565] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87747483-b54e-4c5b-ba3a-c528592a7a5a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.533734] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172d171e-4584-4c36-af3d-3a965f413b23 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.544112] env[65503]: WARNING neutronclient.v2_0.client [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 810.544112] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 810.544112] env[65503]: WARNING openstack [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 810.606739] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f18542-1107-4644-b296-c8884a28e1f9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.619459] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e4d706-7422-4768-a6eb-e25158a0181d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.639817] env[65503]: DEBUG nova.compute.provider_tree [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.703136] env[65503]: INFO nova.compute.manager [-] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Took 1.40 seconds to deallocate network for instance. [ 810.786427] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449985, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.764688} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.786788] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] e74fe378-737a-4732-9a2d-b889a436b8a3/e74fe378-737a-4732-9a2d-b889a436b8a3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 810.786889] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 810.787131] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-059f23d0-b188-4d37-8ed4-94428704e4d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.796578] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 810.796578] env[65503]: value = "task-4449986" [ 810.796578] env[65503]: _type = "Task" [ 810.796578] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.808451] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449986, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.842853] env[65503]: DEBUG nova.network.neutron [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Updating instance_info_cache with network_info: [{"id": "f191aa02-3240-4647-9358-ee80ef3eb29d", "address": "fa:16:3e:b7:65:3a", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf191aa02-32", "ovs_interfaceid": "f191aa02-3240-4647-9358-ee80ef3eb29d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "859e5cfc-c759-44b5-9f3e-fc7a2532d1aa", "address": "fa:16:3e:d6:b9:72", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap859e5cfc-c7", "ovs_interfaceid": "859e5cfc-c759-44b5-9f3e-fc7a2532d1aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 811.143097] env[65503]: DEBUG nova.scheduler.client.report [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 811.210250] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.310738] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449986, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072192} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.310738] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 811.311572] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc250621-8418-4f06-9b0b-86f047a1cfa9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.333828] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] e74fe378-737a-4732-9a2d-b889a436b8a3/e74fe378-737a-4732-9a2d-b889a436b8a3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.334211] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba319a96-0b45-4313-9dbf-8cdaa81f551d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.349136] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 811.350298] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.350298] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.350912] env[65503]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5311a23d-d83f-40c1-9378-e3fb981847fe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.372337] env[65503]: DEBUG nova.virt.hardware [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 811.374638] env[65503]: DEBUG nova.virt.hardware [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 811.374638] env[65503]: DEBUG nova.virt.hardware [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 811.374638] env[65503]: DEBUG nova.virt.hardware [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 811.374638] env[65503]: DEBUG nova.virt.hardware [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 811.374638] env[65503]: DEBUG nova.virt.hardware [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 811.374638] env[65503]: DEBUG nova.virt.hardware [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 811.374638] env[65503]: DEBUG nova.virt.hardware [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 811.374638] env[65503]: DEBUG nova.virt.hardware [None req-04552605-2287-4443-b7d7-4cd2dafdb193 
tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 811.374638] env[65503]: DEBUG nova.virt.hardware [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 811.374638] env[65503]: DEBUG nova.virt.hardware [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 811.380883] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Reconfiguring VM to attach interface {{(pid=65503) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 811.382789] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd862a76-e7ba-4077-884e-de2d1816613c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.399088] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 811.399088] env[65503]: value = "task-4449987" [ 811.399088] env[65503]: _type = "Task" [ 811.399088] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.404437] env[65503]: DEBUG oslo_vmware.api [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 811.404437] env[65503]: value = "task-4449988" [ 811.404437] env[65503]: _type = "Task" [ 811.404437] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.409383] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449987, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.420627] env[65503]: DEBUG oslo_vmware.api [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449988, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.562725] env[65503]: DEBUG nova.compute.manager [req-ab759cef-f921-4436-90b5-4092b8a0e97e req-a8ac10c3-2e55-4c9f-a8e7-233be2f317ef service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Received event network-vif-plugged-859e5cfc-c759-44b5-9f3e-fc7a2532d1aa {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 811.563037] env[65503]: DEBUG oslo_concurrency.lockutils [req-ab759cef-f921-4436-90b5-4092b8a0e97e req-a8ac10c3-2e55-4c9f-a8e7-233be2f317ef service nova] Acquiring lock "a197b590-1f74-4241-9579-2f2d3bb89a1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.563287] env[65503]: DEBUG oslo_concurrency.lockutils [req-ab759cef-f921-4436-90b5-4092b8a0e97e req-a8ac10c3-2e55-4c9f-a8e7-233be2f317ef service nova] Lock "a197b590-1f74-4241-9579-2f2d3bb89a1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.563508] env[65503]: DEBUG oslo_concurrency.lockutils [req-ab759cef-f921-4436-90b5-4092b8a0e97e req-a8ac10c3-2e55-4c9f-a8e7-233be2f317ef service nova] Lock "a197b590-1f74-4241-9579-2f2d3bb89a1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.563724] env[65503]: DEBUG nova.compute.manager [req-ab759cef-f921-4436-90b5-4092b8a0e97e req-a8ac10c3-2e55-4c9f-a8e7-233be2f317ef service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] No waiting events found dispatching network-vif-plugged-859e5cfc-c759-44b5-9f3e-fc7a2532d1aa {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 811.563932] env[65503]: WARNING nova.compute.manager [req-ab759cef-f921-4436-90b5-4092b8a0e97e req-a8ac10c3-2e55-4c9f-a8e7-233be2f317ef service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Received unexpected event network-vif-plugged-859e5cfc-c759-44b5-9f3e-fc7a2532d1aa for instance with vm_state active and task_state None. 
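The entries above show the oslo.vmware task-wait pattern: after invoking ExtendVirtualDisk_Task or ReconfigVM_Task the driver logs 'Waiting for the task: (returnval){ value = "task-NNNNNNN" ... } to complete.' and then polls, logging "progress is N%" until "completed successfully" (with a duration_secs). The sketch below is illustrative only; the real logic is oslo_vmware.api.VMwareAPISession.wait_for_task, and poll_vmware_task/fetch_task_info are hypothetical names.

import time

POLL_INTERVAL = 0.5  # assumed interval; oslo.vmware drives the polling with a loopingcall

def poll_vmware_task(fetch_task_info, task_ref):
    """Poll a vCenter task until it leaves the queued/running states."""
    while True:
        info = fetch_task_info(task_ref)  # TaskInfo-like object: key, state, progress, result, error
        if info.state in ('queued', 'running'):
            # Corresponds to the "Task: {'id': ..., 'name': ...} progress is N%." entries.
            print("Task %s progress is %s%%." % (info.key, info.progress or 0))
            time.sleep(POLL_INTERVAL)
            continue
        if info.state == 'success':
            # Corresponds to "... completed successfully." in the log.
            return info.result
        # state == 'error': raise, which surfaces as a task failure upstream.
        raise RuntimeError("Task %s failed: %s" % (info.key, info.error))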
[ 811.650961] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.378s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.654127] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.654s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.655764] env[65503]: INFO nova.compute.claims [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.678663] env[65503]: INFO nova.scheduler.client.report [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Deleted allocations for instance 5cefb589-9947-4fc1-89b4-d888f8c8f644 [ 811.909237] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449987, 'name': ReconfigVM_Task, 'duration_secs': 0.301262} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.912932] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Reconfigured VM instance instance-00000037 to attach disk [datastore2] e74fe378-737a-4732-9a2d-b889a436b8a3/e74fe378-737a-4732-9a2d-b889a436b8a3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 811.913568] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1fb37c9c-b4f2-4ca8-826d-3782d1638c03 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.926183] env[65503]: DEBUG oslo_vmware.api [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449988, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.928018] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 811.928018] env[65503]: value = "task-4449989" [ 811.928018] env[65503]: _type = "Task" [ 811.928018] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.939975] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449989, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.187082] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e0c57cbd-2645-4007-a93d-dc368397a6f0 tempest-ServerPasswordTestJSON-918419207 tempest-ServerPasswordTestJSON-918419207-project-member] Lock "5cefb589-9947-4fc1-89b4-d888f8c8f644" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.877s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.420018] env[65503]: DEBUG oslo_vmware.api [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449988, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.440221] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449989, 'name': Rename_Task, 'duration_secs': 0.149852} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.440607] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 812.440925] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb6dd1af-3ce4-4551-b0d6-fdeff53928e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.450554] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 812.450554] env[65503]: value = "task-4449990" [ 812.450554] env[65503]: _type = "Task" [ 812.450554] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.466385] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449990, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.921165] env[65503]: DEBUG oslo_vmware.api [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4449988, 'name': ReconfigVM_Task, 'duration_secs': 1.405774} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.924442] env[65503]: WARNING neutronclient.v2_0.client [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 812.924705] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.924916] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Reconfigured VM to attach interface {{(pid=65503) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 812.963339] env[65503]: DEBUG oslo_vmware.api [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449990, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.209237] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a511ec-67db-478d-96fd-5e247b1a261d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.217871] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6b1de3-da06-4768-8fc7-a2bb58fd0850 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.251734] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e43e55-d9cc-4f5c-8c68-34ee963d6970 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.260813] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c4d90f-7d86-4655-a4cd-b2cece5ac8cf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.277803] env[65503]: DEBUG nova.compute.provider_tree [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.430250] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04552605-2287-4443-b7d7-4cd2dafdb193 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-a197b590-1f74-4241-9579-2f2d3bb89a1d-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.226s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.469590] env[65503]: DEBUG oslo_vmware.api 
[None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4449990, 'name': PowerOnVM_Task, 'duration_secs': 0.530449} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.469590] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 813.469590] env[65503]: INFO nova.compute.manager [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Took 5.47 seconds to spawn the instance on the hypervisor. [ 813.469590] env[65503]: DEBUG nova.compute.manager [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 813.469590] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994ba362-a545-4c1e-a312-7edb36310e87 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.500030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.501137] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.004s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.501434] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.501695] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.501905] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e 
tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.504616] env[65503]: INFO nova.compute.manager [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Terminating instance [ 813.781806] env[65503]: DEBUG nova.scheduler.client.report [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.952871] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.952871] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.953352] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.953352] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.953429] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.955872] env[65503]: INFO nova.compute.manager [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Terminating instance [ 813.990163] env[65503]: INFO nova.compute.manager [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Took 45.21 seconds to build instance. [ 814.008627] env[65503]: DEBUG nova.compute.manager [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 814.008853] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 814.010060] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b22ae86-f65a-42d2-a6aa-fe337d36e966 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.019902] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 814.020154] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15e5dbee-9c97-46b6-911e-4611b88b31ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.030425] env[65503]: DEBUG oslo_vmware.api [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 814.030425] env[65503]: value = "task-4449991" [ 814.030425] env[65503]: _type = "Task" [ 814.030425] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.045102] env[65503]: DEBUG oslo_vmware.api [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449991, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.275273] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "083797a8-8daf-493b-89de-7ae9137ed538" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.275539] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "083797a8-8daf-493b-89de-7ae9137ed538" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.275727] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "083797a8-8daf-493b-89de-7ae9137ed538-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.275908] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "083797a8-8daf-493b-89de-7ae9137ed538-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.276087] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "083797a8-8daf-493b-89de-7ae9137ed538-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.279624] env[65503]: INFO nova.compute.manager [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Terminating instance [ 814.289436] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.635s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.289436] env[65503]: DEBUG nova.compute.manager [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 814.293842] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.292s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.298596] env[65503]: INFO nova.compute.claims [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.460239] env[65503]: DEBUG nova.compute.manager [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 814.460502] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 814.464207] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f6b79a-dd5c-40e7-a690-a907a647c680 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.479020] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 814.479020] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e63d50f-a0bc-420c-9649-69a8e091d027 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.486341] env[65503]: DEBUG oslo_vmware.api [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 814.486341] env[65503]: value = "task-4449992" [ 814.486341] env[65503]: _type = "Task" [ 814.486341] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.492389] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e41542d7-37d7-4a31-a496-e83f059239c9 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock "e74fe378-737a-4732-9a2d-b889a436b8a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.868s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.499145] env[65503]: DEBUG oslo_vmware.api [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449992, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.545631] env[65503]: DEBUG oslo_vmware.api [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449991, 'name': PowerOffVM_Task, 'duration_secs': 0.242198} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.546024] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 814.546209] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 814.546517] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e24a447c-d3d6-4e4c-a507-9721884849ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.778402] env[65503]: DEBUG nova.compute.manager [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Received event network-changed-859e5cfc-c759-44b5-9f3e-fc7a2532d1aa {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 814.779138] env[65503]: DEBUG nova.compute.manager [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Refreshing instance network info cache due to event network-changed-859e5cfc-c759-44b5-9f3e-fc7a2532d1aa. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 814.780420] env[65503]: DEBUG oslo_concurrency.lockutils [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] Acquiring lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.780420] env[65503]: DEBUG oslo_concurrency.lockutils [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] Acquired lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.780420] env[65503]: DEBUG nova.network.neutron [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Refreshing network info cache for port 859e5cfc-c759-44b5-9f3e-fc7a2532d1aa {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 814.785530] env[65503]: DEBUG nova.compute.manager [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 814.785530] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 814.785888] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80dddb5-bc35-4c59-9e6d-ff3e19f3ad06 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.798412] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 814.801428] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5c0b073-f662-4f42-8c4a-a4334f939ead {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.808073] env[65503]: DEBUG nova.compute.utils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.810525] env[65503]: DEBUG nova.compute.manager [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 814.810763] env[65503]: DEBUG nova.network.neutron [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 814.811141] env[65503]: WARNING neutronclient.v2_0.client [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 814.811471] env[65503]: WARNING neutronclient.v2_0.client [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 814.812183] env[65503]: WARNING openstack [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 814.812595] env[65503]: WARNING openstack [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 814.827309] env[65503]: DEBUG oslo_vmware.api [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 814.827309] env[65503]: value = "task-4449994" [ 814.827309] env[65503]: _type = "Task" [ 814.827309] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.844171] env[65503]: DEBUG oslo_vmware.api [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449994, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.946275] env[65503]: DEBUG nova.policy [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31218e12a836406eb32ee65a6900ec32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5a588e741704449878e7a03d7892d11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 815.000998] env[65503]: DEBUG oslo_vmware.api [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449992, 'name': PowerOffVM_Task, 'duration_secs': 0.232813} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.000998] env[65503]: DEBUG nova.compute.manager [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 815.002945] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 815.003874] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 815.007506] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5b49031-7300-4960-9820-b49fbfe7cefc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.261818] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 815.262231] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 815.262570] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleting the 
datastore file [datastore1] ad8676f9-0433-49bf-bc72-e36fa010ff1d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 815.262827] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a53f765-ae3b-4944-a85e-f99b590aad85 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.270633] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 815.270853] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 815.271038] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleting the datastore file [datastore1] a22f589e-7c40-4023-9a4c-9ab2a76faa94 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 815.271318] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7245106-2bfc-472c-aadc-c2bbaffc1faf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.275287] env[65503]: DEBUG oslo_vmware.api [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 815.275287] env[65503]: value = "task-4449996" [ 815.275287] env[65503]: _type = "Task" [ 815.275287] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.281075] env[65503]: DEBUG oslo_vmware.api [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 815.281075] env[65503]: value = "task-4449997" [ 815.281075] env[65503]: _type = "Task" [ 815.281075] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.288085] env[65503]: WARNING neutronclient.v2_0.client [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
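Nearly every operation in this stretch is bracketed by oslo.concurrency lock tracing: 'Acquiring lock X by Y', 'Lock X acquired by Y :: waited N s', 'Lock X "released" by Y :: held N s' (for example the 30.654s and 33.292s waits on "compute_resources" before instance_claim). The long waits indicate serialization on the shared per-host resource-tracker lock rather than an error. The stand-in below only mirrors that logged behaviour with stdlib primitives; traced_lock is hypothetical and not the oslo_concurrency.lockutils API.

import threading
import time
from contextlib import contextmanager

_LOCKS = {}                      # one named lock per resource, e.g. "compute_resources"
_REGISTRY_GUARD = threading.Lock()

@contextmanager
def traced_lock(name, owner):
    with _REGISTRY_GUARD:
        lock = _LOCKS.setdefault(name, threading.Lock())
    print('Acquiring lock "%s" by "%s"' % (name, owner))
    t0 = time.monotonic()
    lock.acquire()
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, time.monotonic() - t0))
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, time.monotonic() - t1))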
[ 815.288745] env[65503]: WARNING openstack [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 815.289093] env[65503]: WARNING openstack [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 815.296478] env[65503]: DEBUG oslo_vmware.api [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449996, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.302994] env[65503]: DEBUG oslo_vmware.api [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449997, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.321434] env[65503]: DEBUG nova.compute.manager [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 815.341876] env[65503]: DEBUG oslo_vmware.api [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449994, 'name': PowerOffVM_Task, 'duration_secs': 0.383263} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.342319] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 815.342442] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 815.343205] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c4fb3cb-7e04-4680-847a-d4e49afd4bd3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.415826] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 815.416499] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 815.418161] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleting the datastore file [datastore2] 083797a8-8daf-493b-89de-7ae9137ed538 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 815.419266] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c731ca8-64fa-4e1f-b2b0-73a7ba51afb4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.435090] env[65503]: DEBUG oslo_vmware.api [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 815.435090] env[65503]: value = "task-4449999" [ 815.435090] env[65503]: _type = "Task" [ 815.435090] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.447782] env[65503]: DEBUG oslo_vmware.api [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449999, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.533693] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.539704] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "708ed8ab-0ec9-457c-966d-b11c55895981" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.539941] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "708ed8ab-0ec9-457c-966d-b11c55895981" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.718193] env[65503]: DEBUG nova.network.neutron [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Successfully created port: d6631e8b-b965-49cf-894f-adee439934ac {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 815.791107] env[65503]: DEBUG oslo_vmware.api [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449996, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171482} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.794274] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 815.794556] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 815.794720] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 815.794953] env[65503]: INFO nova.compute.manager [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Took 1.79 seconds to destroy the instance on the hypervisor. [ 815.795249] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 815.796320] env[65503]: DEBUG nova.compute.manager [-] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 815.796320] env[65503]: DEBUG nova.network.neutron [-] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 815.796689] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 815.797167] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 815.797461] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 815.811931] env[65503]: DEBUG oslo_vmware.api [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4449997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168277} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.815558] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 815.815558] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 815.815558] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 815.815558] env[65503]: INFO nova.compute.manager [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Took 1.35 seconds to destroy the instance on the hypervisor. [ 815.815558] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 815.820723] env[65503]: DEBUG nova.compute.manager [-] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 815.820854] env[65503]: DEBUG nova.network.neutron [-] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 815.821132] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 815.821673] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 815.822881] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 815.953509] env[65503]: DEBUG oslo_vmware.api [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4449999, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186389} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.956626] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 815.956723] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 815.956971] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 815.957114] env[65503]: INFO nova.compute.manager [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Took 1.17 seconds to destroy the instance on the hypervisor. [ 815.961350] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 815.961767] env[65503]: DEBUG nova.compute.manager [-] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 815.961888] env[65503]: DEBUG nova.network.neutron [-] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 815.962162] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 815.963157] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 815.963502] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 815.979190] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 815.997156] env[65503]: WARNING openstack [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 815.997563] env[65503]: WARNING openstack [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 816.028647] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 816.037522] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4694394b-bdc3-4f7c-9f59-184064a4d20b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.049204] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf44b78-220b-46f9-8b2e-d04d9b92fc9c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.083822] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c348a22a-c0a4-458f-bede-270cd8bf78cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.092470] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020045c7-b451-4bec-99ed-b39ce79b1b20 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.112466] env[65503]: DEBUG nova.compute.provider_tree [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.339563] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 816.342708] env[65503]: DEBUG nova.compute.manager [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 816.371216] env[65503]: DEBUG nova.virt.hardware [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 816.371546] env[65503]: DEBUG nova.virt.hardware [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 816.371636] env[65503]: DEBUG nova.virt.hardware [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 816.371808] env[65503]: DEBUG nova.virt.hardware [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 816.371942] env[65503]: DEBUG nova.virt.hardware [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 816.372559] env[65503]: DEBUG nova.virt.hardware [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 816.372925] env[65503]: DEBUG nova.virt.hardware [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 816.373207] env[65503]: DEBUG nova.virt.hardware [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 816.373485] env[65503]: DEBUG nova.virt.hardware [None 
req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 816.374060] env[65503]: DEBUG nova.virt.hardware [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 816.374350] env[65503]: DEBUG nova.virt.hardware [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 816.375438] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed596c1-8bae-44ce-974b-41faa0643b6b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.384799] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9469bca7-4291-44e6-871f-4915328148c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.618022] env[65503]: DEBUG nova.scheduler.client.report [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 817.036973] env[65503]: DEBUG nova.network.neutron [-] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 817.126302] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.830s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.126302] env[65503]: DEBUG nova.compute.manager [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 817.133248] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.617s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.133248] env[65503]: DEBUG nova.objects.instance [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lazy-loading 'resources' on Instance uuid 0c0c6d3e-f4d2-458f-aa69-19f87a37f162 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 817.188480] env[65503]: WARNING neutronclient.v2_0.client [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 817.189166] env[65503]: WARNING openstack [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 817.192015] env[65503]: WARNING openstack [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 817.231681] env[65503]: DEBUG nova.network.neutron [-] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 817.464626] env[65503]: DEBUG nova.network.neutron [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Updated VIF entry in instance network info cache for port 859e5cfc-c759-44b5-9f3e-fc7a2532d1aa. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 817.465332] env[65503]: DEBUG nova.network.neutron [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Updating instance_info_cache with network_info: [{"id": "f191aa02-3240-4647-9358-ee80ef3eb29d", "address": "fa:16:3e:b7:65:3a", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf191aa02-32", "ovs_interfaceid": "f191aa02-3240-4647-9358-ee80ef3eb29d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "859e5cfc-c759-44b5-9f3e-fc7a2532d1aa", "address": "fa:16:3e:d6:b9:72", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap859e5cfc-c7", "ovs_interfaceid": "859e5cfc-c759-44b5-9f3e-fc7a2532d1aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 817.488268] env[65503]: DEBUG nova.network.neutron [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Successfully updated port: d6631e8b-b965-49cf-894f-adee439934ac {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 817.539865] env[65503]: INFO nova.compute.manager [-] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Took 1.74 seconds to deallocate network for instance. 
[ 817.630370] env[65503]: DEBUG nova.compute.utils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 817.631651] env[65503]: DEBUG nova.compute.manager [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 817.631826] env[65503]: DEBUG nova.network.neutron [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 817.632304] env[65503]: WARNING neutronclient.v2_0.client [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 817.632553] env[65503]: WARNING neutronclient.v2_0.client [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 817.633805] env[65503]: WARNING openstack [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 817.633805] env[65503]: WARNING openstack [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 817.640225] env[65503]: DEBUG nova.network.neutron [-] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 817.680516] env[65503]: DEBUG nova.policy [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bb5e13cc99b46ec8eb5a6befc982bd1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ecc78e2a9434c05bb07afa8e31e918d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 
'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 817.736852] env[65503]: INFO nova.compute.manager [-] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Took 1.91 seconds to deallocate network for instance. [ 817.949553] env[65503]: DEBUG nova.compute.manager [req-38ef7961-c764-4bd1-9dea-598bafaf4a1e req-3baffc93-1219-4ec2-9416-fa80e9e477a4 service nova] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Received event network-vif-deleted-cb078d1e-65b3-4dfd-980e-792978036eb5 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 817.949763] env[65503]: DEBUG nova.compute.manager [req-38ef7961-c764-4bd1-9dea-598bafaf4a1e req-3baffc93-1219-4ec2-9416-fa80e9e477a4 service nova] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Received event network-vif-deleted-6e3dc22f-5c81-48e2-9afd-c567f7010a9b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 817.970782] env[65503]: DEBUG oslo_concurrency.lockutils [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] Releasing lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.971091] env[65503]: DEBUG nova.compute.manager [req-ce81a6ca-dabc-41c3-b6eb-b2ef70f12e44 req-9bd6a4a3-aaa9-45cb-80f7-955afb8d6a61 service nova] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Received event network-vif-deleted-bd140c8d-63d1-4c8c-a14e-2f8ce80a648c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 817.990075] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "refresh_cache-e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.991479] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired lock "refresh_cache-e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.991479] env[65503]: DEBUG nova.network.neutron [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 818.044153] env[65503]: DEBUG nova.network.neutron [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Successfully created port: 23e068ca-4763-4b07-a124-fdeee41f5399 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 818.048084] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.141985] env[65503]: DEBUG nova.compute.manager [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 818.146100] env[65503]: INFO nova.compute.manager [-] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Took 2.18 seconds to deallocate network for instance. [ 818.220820] env[65503]: DEBUG nova.compute.manager [req-a022d680-a045-4cce-bdf9-b673c76fb30c req-d3e092c5-7e9a-400c-8d3f-01db31cccec8 service nova] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Received event network-vif-plugged-d6631e8b-b965-49cf-894f-adee439934ac {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 818.221314] env[65503]: DEBUG oslo_concurrency.lockutils [req-a022d680-a045-4cce-bdf9-b673c76fb30c req-d3e092c5-7e9a-400c-8d3f-01db31cccec8 service nova] Acquiring lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.221551] env[65503]: DEBUG oslo_concurrency.lockutils [req-a022d680-a045-4cce-bdf9-b673c76fb30c req-d3e092c5-7e9a-400c-8d3f-01db31cccec8 service nova] Lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.221738] env[65503]: DEBUG oslo_concurrency.lockutils [req-a022d680-a045-4cce-bdf9-b673c76fb30c req-d3e092c5-7e9a-400c-8d3f-01db31cccec8 service nova] Lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.221911] env[65503]: DEBUG nova.compute.manager [req-a022d680-a045-4cce-bdf9-b673c76fb30c req-d3e092c5-7e9a-400c-8d3f-01db31cccec8 service nova] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] No waiting events found dispatching network-vif-plugged-d6631e8b-b965-49cf-894f-adee439934ac {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 818.222692] env[65503]: WARNING nova.compute.manager [req-a022d680-a045-4cce-bdf9-b673c76fb30c req-d3e092c5-7e9a-400c-8d3f-01db31cccec8 service nova] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Received unexpected event network-vif-plugged-d6631e8b-b965-49cf-894f-adee439934ac for instance with vm_state building and task_state spawning. 
[ 818.247230] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.307506] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd8a5ed-548a-470b-a50c-e365f2045c26 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.318569] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50398c7-d013-407c-975d-79956a15c371 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.354022] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba95f348-7d49-4353-bfad-541ccb5ef74b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.363569] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52a2242-6f99-40ac-8cb3-eddf083b2005 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.380156] env[65503]: DEBUG nova.compute.provider_tree [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.408353] env[65503]: INFO nova.compute.manager [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Rebuilding instance [ 818.454272] env[65503]: DEBUG nova.compute.manager [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 818.455572] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c7f832-2c6d-4444-94de-89a48e0970c2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.495927] env[65503]: WARNING openstack [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 818.496514] env[65503]: WARNING openstack [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 
'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 818.654030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.884081] env[65503]: DEBUG nova.scheduler.client.report [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 818.939050] env[65503]: DEBUG nova.network.neutron [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 818.984756] env[65503]: WARNING openstack [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 818.985235] env[65503]: WARNING openstack [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 819.076865] env[65503]: WARNING neutronclient.v2_0.client [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 819.077580] env[65503]: WARNING openstack [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 819.077974] env[65503]: WARNING openstack [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 819.158153] env[65503]: DEBUG nova.compute.manager [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 819.164389] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquiring lock "dcc876ae-075f-48d2-81a4-a1b780d6fdec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.164599] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "dcc876ae-075f-48d2-81a4-a1b780d6fdec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.191909] env[65503]: DEBUG nova.virt.hardware [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 819.193311] env[65503]: DEBUG nova.virt.hardware [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 
tempest-ListServerFiltersTestJSON-450683048-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 819.193634] env[65503]: DEBUG nova.virt.hardware [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 819.193937] env[65503]: DEBUG nova.virt.hardware [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 819.194178] env[65503]: DEBUG nova.virt.hardware [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 819.194386] env[65503]: DEBUG nova.virt.hardware [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 819.194723] env[65503]: DEBUG nova.virt.hardware [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 819.194973] env[65503]: DEBUG nova.virt.hardware [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 819.195242] env[65503]: DEBUG nova.virt.hardware [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 819.195477] env[65503]: DEBUG nova.virt.hardware [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 819.195738] env[65503]: DEBUG nova.virt.hardware [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 819.197083] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8ce93e-ce8e-49d8-adb2-e471a0e83cfb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.209621] env[65503]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4144ac83-05b2-444b-a369-0eaf00b793c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.232145] env[65503]: DEBUG nova.network.neutron [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Updating instance_info_cache with network_info: [{"id": "d6631e8b-b965-49cf-894f-adee439934ac", "address": "fa:16:3e:05:f9:98", "network": {"id": "1ed26744-3e1a-4978-ae1a-76469b646e4a", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472231669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5a588e741704449878e7a03d7892d11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6631e8b-b9", "ovs_interfaceid": "d6631e8b-b965-49cf-894f-adee439934ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 819.389878] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.259s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.392334] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.376s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.392568] env[65503]: DEBUG nova.objects.instance [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Lazy-loading 'resources' on Instance uuid f7b81948-c480-47a4-9d0f-5c2c163bd7f2 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 819.430435] env[65503]: INFO nova.scheduler.client.report [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Deleted allocations for instance 0c0c6d3e-f4d2-458f-aa69-19f87a37f162 [ 819.473900] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: 
e74fe378-737a-4732-9a2d-b889a436b8a3] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 819.475028] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8226d59b-0788-443f-a615-04d940c8a9ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.484031] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 819.484031] env[65503]: value = "task-4450000" [ 819.484031] env[65503]: _type = "Task" [ 819.484031] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.493979] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450000, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.531029] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "interface-a197b590-1f74-4241-9579-2f2d3bb89a1d-859e5cfc-c759-44b5-9f3e-fc7a2532d1aa" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.531301] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-a197b590-1f74-4241-9579-2f2d3bb89a1d-859e5cfc-c759-44b5-9f3e-fc7a2532d1aa" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.702859] env[65503]: DEBUG nova.network.neutron [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Successfully updated port: 23e068ca-4763-4b07-a124-fdeee41f5399 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 819.735291] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lock "refresh_cache-e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.735703] env[65503]: DEBUG nova.compute.manager [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Instance network_info: |[{"id": "d6631e8b-b965-49cf-894f-adee439934ac", "address": "fa:16:3e:05:f9:98", "network": {"id": "1ed26744-3e1a-4978-ae1a-76469b646e4a", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472231669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5a588e741704449878e7a03d7892d11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6631e8b-b9", "ovs_interfaceid": "d6631e8b-b965-49cf-894f-adee439934ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 819.736575] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:f9:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2180b40f-2bb0-47da-ba80-c2fbe7f98af0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6631e8b-b965-49cf-894f-adee439934ac', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 819.744310] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 819.744520] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 819.744745] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a77c041d-319b-46e2-a10c-c8d7cd8984ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.769418] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 819.769418] env[65503]: value = "task-4450001" [ 819.769418] env[65503]: _type = "Task" [ 819.769418] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.782635] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450001, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.942742] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6db86825-f2f9-4f0e-bf51-306471da0f30 tempest-AttachInterfacesUnderV243Test-1101859705 tempest-AttachInterfacesUnderV243Test-1101859705-project-member] Lock "0c0c6d3e-f4d2-458f-aa69-19f87a37f162" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.451s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.998204] env[65503]: DEBUG nova.compute.manager [req-c26db041-2ca1-4ced-bafc-0a7e00509f83 req-13130ccb-e7d8-4f9c-bbcf-719217715c85 service nova] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Received event network-vif-deleted-f956ccee-3465-49d9-8608-0bb9d01afbe6 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 820.001973] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450000, 'name': PowerOffVM_Task, 'duration_secs': 0.148423} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.002762] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 820.003218] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 820.004720] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa266f90-6294-4eef-bab3-e5b167e533cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.016483] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 820.017260] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02ba99bd-780a-4ebf-a9df-11b22b06312d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.035590] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.035590] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.036984] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5a9164-dde2-435d-8112-7c97cc72fc49 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.047812] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 820.047983] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 820.048182] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Deleting the datastore file [datastore2] e74fe378-737a-4732-9a2d-b889a436b8a3 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 820.048547] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1128f874-bb80-458c-9bb3-9ca79fe14b54 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.066677] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea75093-d7e7-4f42-86e1-3d2c75b314d3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.071901] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 820.071901] env[65503]: value = "task-4450003" [ 820.071901] env[65503]: _type = "Task" [ 820.071901] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.094806] env[65503]: WARNING neutronclient.v2_0.client [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
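Editor's note: the "Acquiring lock ... / Lock ... acquired ... waited 0.000s / ... released ... held 41.451s" entries above come from oslo.concurrency's lock helpers, which Nova uses to serialize work on a single instance UUID. Below is a minimal sketch of that pattern, assuming only the standard oslo_concurrency.lockutils API; the function bodies are placeholders and the UUIDs are simply copied from the log, so this is an illustration of the locking shape, not Nova's actual code.

from oslo_concurrency import lockutils

def do_terminate_instance(instance_uuid):
    # Serialize all work on one instance: concurrent requests against the same
    # UUID queue up here, which is what the "waited N s / held N s" figures measure.
    with lockutils.lock(instance_uuid):
        pass  # power off, unregister, delete datastore files, ...

def refresh_instance_cache(instance_uuid):
    # A separate, narrower lock protects the network info cache refresh
    # (the "refresh_cache-<uuid>" lock names seen in the surrounding entries).
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # rebuild instance_info_cache while holding the lock

do_terminate_instance("0c0c6d3e-f4d2-458f-aa69-19f87a37f162")   # UUIDs taken from the log
refresh_instance_cache("bdbae548-eefc-4e59-8053-f4b8e232580d")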
[ 820.101616] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Reconfiguring VM to detach interface {{(pid=65503) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 820.106100] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcfe14ae-99d6-4222-a3f8-8fba5fd22e9d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.135327] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450003, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.143130] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 820.143130] env[65503]: value = "task-4450004" [ 820.143130] env[65503]: _type = "Task" [ 820.143130] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.159670] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.208414] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "refresh_cache-bdbae548-eefc-4e59-8053-f4b8e232580d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.208700] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired lock "refresh_cache-bdbae548-eefc-4e59-8053-f4b8e232580d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.208957] env[65503]: DEBUG nova.network.neutron [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 820.263731] env[65503]: DEBUG nova.compute.manager [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Received event network-changed-d6631e8b-b965-49cf-894f-adee439934ac {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 820.263940] env[65503]: DEBUG nova.compute.manager [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Refreshing instance 
network info cache due to event network-changed-d6631e8b-b965-49cf-894f-adee439934ac. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 820.264150] env[65503]: DEBUG oslo_concurrency.lockutils [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Acquiring lock "refresh_cache-e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.264312] env[65503]: DEBUG oslo_concurrency.lockutils [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Acquired lock "refresh_cache-e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.264453] env[65503]: DEBUG nova.network.neutron [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Refreshing network info cache for port d6631e8b-b965-49cf-894f-adee439934ac {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 820.280792] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450001, 'name': CreateVM_Task, 'duration_secs': 0.405311} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.283512] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 820.284511] env[65503]: WARNING neutronclient.v2_0.client [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
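Editor's note: the "Waiting for the task: (returnval){ value = task-... } to complete", "progress is N%" and "completed successfully" entries are oslo.vmware's task polling. The standalone sketch below reproduces only the shape of that poll loop; it is not the oslo.vmware implementation, and FakeTask is an invented stand-in for a vCenter task object so the example runs on its own.

import time

class FakeTask:
    """Stand-in for a vCenter task whose state we can poll."""
    def __init__(self, task_id, steps=3):
        self.id = task_id
        self._polls = 0
        self._steps = steps

    def poll(self):
        """Return (state, progress); flips to 'success' after a few polls."""
        self._polls += 1
        if self._polls >= self._steps:
            return "success", 100
        return "running", int(100 * self._polls / self._steps)

def wait_for_task(task, poll_interval=0.5):
    """Poll until the task reaches a terminal state, as _poll_task does in the log."""
    while True:
        state, progress = task.poll()
        print("Task: %s progress is %d%%" % (task.id, progress))
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("Task %s failed" % task.id)
        time.sleep(poll_interval)

wait_for_task(FakeTask("task-4450004"))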
[ 820.284888] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.285047] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.285411] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 820.285924] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dea3cb48-a939-4fb4-bb12-9b2b52d33066 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.292425] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 820.292425] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a9bfbf-5de1-634b-ba41-9bf830c89865" [ 820.292425] env[65503]: _type = "Task" [ 820.292425] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.303558] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a9bfbf-5de1-634b-ba41-9bf830c89865, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.562855] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6893dc72-73b2-41e9-b7e6-59154f00cf77 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.571518] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5b2e5c-d70d-493f-b813-7d5dfdd7aaa3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.584546] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450003, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12294} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.613637] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 820.613856] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 820.614042] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 820.617745] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda627e2-f0af-4f9b-ae6a-11574405f7f9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.626552] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df6e74e-bc36-42de-bfe1-ada61ea67df9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.648241] env[65503]: DEBUG nova.compute.provider_tree [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.659198] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.712998] env[65503]: WARNING openstack [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 820.713506] env[65503]: WARNING openstack [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 820.761851] env[65503]: DEBUG nova.network.neutron [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 820.770569] env[65503]: WARNING neutronclient.v2_0.client [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 820.771946] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 820.772045] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 820.783663] env[65503]: WARNING openstack [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 820.784216] env[65503]: WARNING openstack [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 820.806427] env[65503]: DEBUG oslo_vmware.api [None 
req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a9bfbf-5de1-634b-ba41-9bf830c89865, 'name': SearchDatastore_Task, 'duration_secs': 0.011276} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.806427] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.806427] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 820.806427] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.806427] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.806427] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.806427] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d8c3551-cddf-4795-ad88-d726e43873b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.816790] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.816969] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 820.817766] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbc3d202-d1ca-44d9-a580-db72583b945b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.824233] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 820.824233] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a748f6-dd79-1b41-de5a-21646f23e65e" [ 820.824233] env[65503]: _type = "Task" [ 820.824233] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.833123] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a748f6-dd79-1b41-de5a-21646f23e65e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.971202] env[65503]: WARNING neutronclient.v2_0.client [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 820.971865] env[65503]: WARNING openstack [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 820.972355] env[65503]: WARNING openstack [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 820.986489] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 820.986855] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 821.104404] env[65503]: DEBUG nova.network.neutron [None 
req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Updating instance_info_cache with network_info: [{"id": "23e068ca-4763-4b07-a124-fdeee41f5399", "address": "fa:16:3e:92:7b:5e", "network": {"id": "0b0ccb7a-8f7a-4d6c-a69d-9813e51cb43f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1056086899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecc78e2a9434c05bb07afa8e31e918d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e068ca-47", "ovs_interfaceid": "23e068ca-4763-4b07-a124-fdeee41f5399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 821.157692] env[65503]: DEBUG nova.scheduler.client.report [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 821.172595] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.227029] env[65503]: WARNING neutronclient.v2_0.client [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
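Editor's note: the "Inventory has not changed for provider ... based on inventory data" entry above lists, per resource class, the total, the reservation and the allocation ratio. As a worked check of what that inventory implies for scheduling, the short sketch below applies the usual Placement capacity formula, capacity = (total - reserved) * allocation_ratio; it is an illustration of the arithmetic, not Nova's scheduler code.

# Effective capacity implied by the inventory data logged above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200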
[ 821.227811] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 821.228258] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 821.336545] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a748f6-dd79-1b41-de5a-21646f23e65e, 'name': SearchDatastore_Task, 'duration_secs': 0.011428} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.336545] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5084bb99-f46c-4582-98c2-8185c3967381 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.349172] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 821.349172] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e38f2b-741a-2b69-a046-2599c2d027a5" [ 821.349172] env[65503]: _type = "Task" [ 821.349172] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.359159] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e38f2b-741a-2b69-a046-2599c2d027a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.407714] env[65503]: DEBUG nova.network.neutron [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Updated VIF entry in instance network info cache for port d6631e8b-b965-49cf-894f-adee439934ac. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 821.408213] env[65503]: DEBUG nova.network.neutron [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Updating instance_info_cache with network_info: [{"id": "d6631e8b-b965-49cf-894f-adee439934ac", "address": "fa:16:3e:05:f9:98", "network": {"id": "1ed26744-3e1a-4978-ae1a-76469b646e4a", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472231669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5a588e741704449878e7a03d7892d11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6631e8b-b9", "ovs_interfaceid": "d6631e8b-b965-49cf-894f-adee439934ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 821.610267] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Releasing lock "refresh_cache-bdbae548-eefc-4e59-8053-f4b8e232580d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.610267] env[65503]: DEBUG nova.compute.manager [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Instance network_info: |[{"id": "23e068ca-4763-4b07-a124-fdeee41f5399", "address": "fa:16:3e:92:7b:5e", "network": {"id": "0b0ccb7a-8f7a-4d6c-a69d-9813e51cb43f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1056086899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecc78e2a9434c05bb07afa8e31e918d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e068ca-47", "ovs_interfaceid": "23e068ca-4763-4b07-a124-fdeee41f5399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 
821.610267] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:7b:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23e068ca-4763-4b07-a124-fdeee41f5399', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.626139] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Creating folder: Project (5ecc78e2a9434c05bb07afa8e31e918d). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.630580] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a70d45c-2b44-404a-b597-d131d04df62d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.645302] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Created folder: Project (5ecc78e2a9434c05bb07afa8e31e918d) in parent group-v870190. [ 821.645772] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Creating folder: Instances. Parent ref: group-v870354. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.646640] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d2bd1a7-1b2d-4c36-aae9-cb13b465cf21 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.661114] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.663708] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.271s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.668167] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.361s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.668512] env[65503]: DEBUG nova.objects.instance [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lazy-loading 'resources' on Instance uuid 1c598208-a4d0-46b8-9a9c-107353e957b9 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 821.670046] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Created folder: Instances in parent group-v870354. [ 821.670401] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 821.670657] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.670919] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ace86bf-bbd0-4fc2-817b-368d8995008a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.696077] env[65503]: DEBUG nova.virt.hardware [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 821.696229] env[65503]: DEBUG nova.virt.hardware [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 821.696372] env[65503]: DEBUG nova.virt.hardware [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 821.696624] env[65503]: DEBUG nova.virt.hardware [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 821.696801] env[65503]: DEBUG nova.virt.hardware [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 821.696977] env[65503]: DEBUG nova.virt.hardware [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 821.697242] env[65503]: DEBUG nova.virt.hardware [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 821.697401] env[65503]: DEBUG nova.virt.hardware [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 821.697600] env[65503]: DEBUG nova.virt.hardware [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 821.697799] env[65503]: DEBUG nova.virt.hardware [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 821.698017] env[65503]: DEBUG nova.virt.hardware [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 821.699789] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdfc8e9-9281-496d-8b8f-38975fd797ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.703650] env[65503]: INFO nova.scheduler.client.report [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Deleted allocations for instance f7b81948-c480-47a4-9d0f-5c2c163bd7f2 [ 821.710997] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.710997] env[65503]: value = "task-4450007" [ 821.710997] env[65503]: _type = "Task" [ 821.710997] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.718323] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b308641-d765-46fc-9c63-0551ce972c91 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.737051] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.747198] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 821.748621] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.751309] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450007, 'name': CreateVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.751309] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08f55b03-f835-4472-b0d7-3f377e511867 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.773436] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.773436] env[65503]: value = "task-4450008" [ 821.773436] env[65503]: _type = "Task" [ 821.773436] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.788834] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450008, 'name': CreateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.861401] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e38f2b-741a-2b69-a046-2599c2d027a5, 'name': SearchDatastore_Task, 'duration_secs': 0.015103} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.861922] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.861922] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf/e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 821.862381] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b3325f6-73ee-4882-add6-ad052a8463cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.870757] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 821.870757] env[65503]: value = "task-4450009" [ 821.870757] env[65503]: _type = "Task" [ 821.870757] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.880249] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450009, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.914811] env[65503]: DEBUG oslo_concurrency.lockutils [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Releasing lock "refresh_cache-e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.915221] env[65503]: DEBUG nova.compute.manager [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Received event network-vif-plugged-23e068ca-4763-4b07-a124-fdeee41f5399 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 821.915326] env[65503]: DEBUG oslo_concurrency.lockutils [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Acquiring lock "bdbae548-eefc-4e59-8053-f4b8e232580d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.915540] env[65503]: DEBUG oslo_concurrency.lockutils [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Lock "bdbae548-eefc-4e59-8053-f4b8e232580d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.915701] env[65503]: DEBUG oslo_concurrency.lockutils [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Lock "bdbae548-eefc-4e59-8053-f4b8e232580d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.915892] env[65503]: DEBUG nova.compute.manager [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] No waiting events found dispatching network-vif-plugged-23e068ca-4763-4b07-a124-fdeee41f5399 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 821.916045] env[65503]: WARNING nova.compute.manager [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Received unexpected event network-vif-plugged-23e068ca-4763-4b07-a124-fdeee41f5399 for instance with vm_state building and task_state spawning. 
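Editor's note: the SearchDatastore_Task lookups and the "Copying Virtual Disk [datastore1] devstack-image-cache_base/... to [datastore1] e52bfd02-.../....vmdk" entry above are the VMware driver's image-cache path: check whether the base VMDK is already cached on the datastore, fetch it only on a miss, then copy it into the new instance's folder. The condensed sketch below shows only that decision; vmdk_exists, fetch_image_to_cache and copy_virtual_disk are hypothetical placeholders for the real vmops/ds_util/vm_util calls.

def vmdk_exists(datastore, path):
    return True  # placeholder: in the log, SearchDatastore_Task answers this

def fetch_image_to_cache(datastore, image_id):
    print(f"downloading image {image_id} into [{datastore}] devstack-image-cache_base")

def copy_virtual_disk(datastore, src, dst):
    print(f"CopyVirtualDisk [{datastore}] {src} -> [{datastore}] {dst}")

def prepare_root_disk(datastore, image_id, instance_uuid):
    cached = f"devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    if not vmdk_exists(datastore, cached):
        # Cache miss: download the base disk once so later instances can reuse it.
        fetch_image_to_cache(datastore, image_id)
    # Every instance then gets its own copy of the cached base disk.
    copy_virtual_disk(datastore, cached, f"{instance_uuid}/{instance_uuid}.vmdk")

prepare_root_disk("datastore1",
                  "d68ffece-ab91-4610-b535-fa1fb25ade93",
                  "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf")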
[ 821.916195] env[65503]: DEBUG nova.compute.manager [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Received event network-changed-23e068ca-4763-4b07-a124-fdeee41f5399 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 821.916340] env[65503]: DEBUG nova.compute.manager [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Refreshing instance network info cache due to event network-changed-23e068ca-4763-4b07-a124-fdeee41f5399. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 821.916516] env[65503]: DEBUG oslo_concurrency.lockutils [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Acquiring lock "refresh_cache-bdbae548-eefc-4e59-8053-f4b8e232580d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.916644] env[65503]: DEBUG oslo_concurrency.lockutils [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Acquired lock "refresh_cache-bdbae548-eefc-4e59-8053-f4b8e232580d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.916795] env[65503]: DEBUG nova.network.neutron [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Refreshing network info cache for port 23e068ca-4763-4b07-a124-fdeee41f5399 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 822.166131] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.220966] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07f0857f-f326-4924-9336-9a6b899686eb tempest-ServersTestFqdnHostnames-1231859803 tempest-ServersTestFqdnHostnames-1231859803-project-member] Lock "f7b81948-c480-47a4-9d0f-5c2c163bd7f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.418s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.231345] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450007, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.292272] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450008, 'name': CreateVM_Task, 'duration_secs': 0.303145} completed successfully. 
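Editor's note: the network-vif-plugged / pop_instance_event / "No waiting events found dispatching ... Received unexpected event" entries above show the compute-side event dispatch: code that wants to block on a Neutron event registers a waiter first; when the event arrives it is popped and the waiter woken, and an event nobody registered for is logged as unexpected. Below is a minimal wait/pop registry sketch using threading.Event; the class and method names mirror the log for readability but are illustrative, not Nova's implementation.

import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        """Register a waiter before triggering the external action."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        """Called when Neutron reports the event; returns the waiter, if any."""
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

events = InstanceEvents()
waiter = events.pop_instance_event(
    "bdbae548-eefc-4e59-8053-f4b8e232580d",
    "network-vif-plugged-23e068ca-4763-4b07-a124-fdeee41f5399")
if waiter is None:
    print("No waiting events found; received unexpected event")  # matches the WARNING above
else:
    waiter.set()  # wake the thread blocked on this event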
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.295085] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.296390] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.296390] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.297730] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 822.297730] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45bf654e-751c-44cf-9a2e-82300ff512b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.305161] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 822.305161] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ca9004-ce47-350c-30db-5b2f3171c9cd" [ 822.305161] env[65503]: _type = "Task" [ 822.305161] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.324025] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ca9004-ce47-350c-30db-5b2f3171c9cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.390807] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450009, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.420170] env[65503]: WARNING neutronclient.v2_0.client [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 822.421424] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 822.422068] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 822.589494] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 822.589865] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 822.664614] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.728749] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450007, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.775990] env[65503]: WARNING neutronclient.v2_0.client [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
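The pairs of "Disabling service 'block-storage' ... / 'key-manager' ..." warnings repeat every time openstacksdk is initialised from nova.conf: the SDK probes the [cinder] and [barbican] groups for the keystoneauth adapter option valid_interfaces, which only exists in a group once the adapter options have been registered for it, hence the oslo_config NoSuchOptError. A small, self-contained sketch of that mechanism, assuming the keystoneauth1 and oslo.config behaviour described here (hypothetical group contents; not Nova's startup code):

    from keystoneauth1 import loading as ks_loading
    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    # Give the [cinder] group some unrelated option so the group exists,
    # roughly as it does in nova.conf.
    conf.register_opts([cfg.StrOpt('catalog_info')], group='cinder')
    conf([], project='example')

    try:
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        # Same error class and shape as in the warnings above:
        # "no such option valid_interfaces in group [cinder]"
        print(exc)

    # Registering the keystoneauth adapter options adds valid_interfaces
    # (among others) to the group, which is what the SDK probe looks for.
    ks_loading.register_adapter_conf_options(conf, 'cinder')
    print(conf.cinder.valid_interfaces)   # None unless set in a config file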
[ 822.776745] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 822.777113] env[65503]: WARNING openstack [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 822.820623] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ca9004-ce47-350c-30db-5b2f3171c9cd, 'name': SearchDatastore_Task, 'duration_secs': 0.041358} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.821553] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.821553] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.821553] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.821872] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.821872] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.825265] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fac1cac1-5c10-4413-ac03-3b95d9f40e61 {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.836960] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.837172] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 822.837955] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf63e463-4566-4bcd-adbd-61a77714a932 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.848771] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 822.848771] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a0e15b-7168-9655-2563-b94cc8ace361" [ 822.848771] env[65503]: _type = "Task" [ 822.848771] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.863900] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a0e15b-7168-9655-2563-b94cc8ace361, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.886932] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450009, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577877} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.886932] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf/e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 822.886932] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 822.886932] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea2123ab-867d-4747-9bb2-f437736b871c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.895142] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 822.895142] env[65503]: value = "task-4450010" [ 822.895142] env[65503]: _type = "Task" [ 822.895142] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.911132] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450010, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.918921] env[65503]: DEBUG nova.network.neutron [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Updated VIF entry in instance network info cache for port 23e068ca-4763-4b07-a124-fdeee41f5399. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 822.921032] env[65503]: DEBUG nova.network.neutron [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Updating instance_info_cache with network_info: [{"id": "23e068ca-4763-4b07-a124-fdeee41f5399", "address": "fa:16:3e:92:7b:5e", "network": {"id": "0b0ccb7a-8f7a-4d6c-a69d-9813e51cb43f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1056086899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecc78e2a9434c05bb07afa8e31e918d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e068ca-47", "ovs_interfaceid": "23e068ca-4763-4b07-a124-fdeee41f5399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 822.929784] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25c93c2-9ff3-4a26-b9b1-c39f35966a29 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.940271] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770ea23c-1122-431d-95ec-973094ddf971 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.976553] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a602f2-98cd-4906-ae05-3cb2bcaf2f78 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.985675] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2471b4b-8765-407e-9b5a-a49590e4b5db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.003444] env[65503]: DEBUG nova.compute.provider_tree [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.162476] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.227813] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450007, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.361137] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a0e15b-7168-9655-2563-b94cc8ace361, 'name': SearchDatastore_Task, 'duration_secs': 0.036752} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.362065] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5665bdf2-d590-41c5-9945-72a579a6c803 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.369062] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 823.369062] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cf91cc-ccba-02ed-bda3-5e430892902b" [ 823.369062] env[65503]: _type = "Task" [ 823.369062] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.380393] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cf91cc-ccba-02ed-bda3-5e430892902b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.408606] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450010, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.136078} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.409846] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 823.410783] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebca8d1-34b2-4ae0-86db-14f82742083a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.427899] env[65503]: DEBUG oslo_concurrency.lockutils [req-f46c191c-f835-4d22-8f90-2cc52fd9a717 req-53e0ed09-ca50-4141-962b-496338141cd3 service nova] Releasing lock "refresh_cache-bdbae548-eefc-4e59-8053-f4b8e232580d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.437950] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf/e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.438532] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0eed0267-7f3b-4cc4-8223-5fb3f4d7ac18 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.462528] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 823.462528] env[65503]: value = "task-4450011" [ 823.462528] env[65503]: _type = "Task" [ 823.462528] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.473398] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450011, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.506673] env[65503]: DEBUG nova.scheduler.client.report [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.663585] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.727120] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450007, 'name': CreateVM_Task, 'duration_secs': 1.66817} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.727466] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 823.728168] env[65503]: WARNING neutronclient.v2_0.client [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
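The inventory snapshot logged for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 is what the compute node's resource tracker reports to Placement via the scheduler report client. Usable capacity per resource class is derived as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A short sketch reproducing that arithmetic on the exact figures from the log:

    # Inventory data copied verbatim from the log line above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                      'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'min_unit': 1,
                      'max_unit': 95,    'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Placement-style capacity: overcommit applies after reservations.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:.0f}, "
              f"largest single allocation={inv['max_unit']}")
    # VCPU: 192 (4x overcommit on 48 cores), MEMORY_MB: 196078, DISK_GB: 200.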
[ 823.728677] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.728955] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.729393] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 823.729775] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6febfe97-2c97-4881-9c3c-815f3f0da31b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.737083] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 823.737083] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ede44d-0c11-aff4-c974-1a967582feb2" [ 823.737083] env[65503]: _type = "Task" [ 823.737083] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.747520] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ede44d-0c11-aff4-c974-1a967582feb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.885025] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cf91cc-ccba-02ed-bda3-5e430892902b, 'name': SearchDatastore_Task, 'duration_secs': 0.035818} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.885025] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.885025] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] e74fe378-737a-4732-9a2d-b889a436b8a3/e74fe378-737a-4732-9a2d-b889a436b8a3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 823.885025] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1229ee02-e60b-4d8a-9c21-b71c268ca62c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.894032] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 823.894032] env[65503]: value = "task-4450012" [ 823.894032] env[65503]: _type = "Task" [ 823.894032] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.905154] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450012, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.975247] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450011, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.012644] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.344s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.015882] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.190s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.016983] env[65503]: INFO nova.compute.claims [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.046412] env[65503]: INFO nova.scheduler.client.report [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Deleted allocations for instance 1c598208-a4d0-46b8-9a9c-107353e957b9 [ 824.163836] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.247245] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ede44d-0c11-aff4-c974-1a967582feb2, 'name': SearchDatastore_Task, 'duration_secs': 0.015863} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.247713] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.247817] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 824.248023] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.248117] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.248360] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 824.248970] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22fe5949-514a-4b3e-93b6-1e6c8fe0548f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.262106] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 824.262365] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 824.263276] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72b34e9c-0687-43de-9027-8a436f08ce90 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.270499] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 824.270499] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206e294-10ac-9f35-cbfd-281bace98810" [ 824.270499] env[65503]: _type = "Task" [ 824.270499] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.279851] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206e294-10ac-9f35-cbfd-281bace98810, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.403920] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450012, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.478424] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450011, 'name': ReconfigVM_Task, 'duration_secs': 0.60559} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.478759] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Reconfigured VM instance instance-00000039 to attach disk [datastore1] e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf/e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.480556] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edacd925-e1c5-4fc7-908b-8204878883a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.489475] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 824.489475] env[65503]: value = "task-4450013" [ 824.489475] env[65503]: _type = "Task" [ 824.489475] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.504184] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450013, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.555940] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d3678b4-3c3a-42fd-a59d-d06a7d8c5f6f tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "1c598208-a4d0-46b8-9a9c-107353e957b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.619s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.670533] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.783898] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206e294-10ac-9f35-cbfd-281bace98810, 'name': SearchDatastore_Task, 'duration_secs': 0.033572} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.783898] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1fbe2b5-dcbd-484c-83df-a7f459525224 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.791119] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 824.791119] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5299b297-6add-d026-6efc-3bec825b925a" [ 824.791119] env[65503]: _type = "Task" [ 824.791119] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.802633] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5299b297-6add-d026-6efc-3bec825b925a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.904158] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450012, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630498} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.904466] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] e74fe378-737a-4732-9a2d-b889a436b8a3/e74fe378-737a-4732-9a2d-b889a436b8a3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 824.904611] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 824.904964] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd9026f9-5fa1-46c3-841e-0f2c4f7f4867 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.915707] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 824.915707] env[65503]: value = "task-4450014" [ 824.915707] env[65503]: _type = "Task" [ 824.915707] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.928884] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450014, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.000707] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450013, 'name': Rename_Task, 'duration_secs': 0.179972} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.001030] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.001362] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a508fb1-a0b5-4204-8a36-e1c01cd0f76e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.009802] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 825.009802] env[65503]: value = "task-4450015" [ 825.009802] env[65503]: _type = "Task" [ 825.009802] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.024050] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450015, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.165830] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.303459] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5299b297-6add-d026-6efc-3bec825b925a, 'name': SearchDatastore_Task, 'duration_secs': 0.011593} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.306803] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.307142] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] bdbae548-eefc-4e59-8053-f4b8e232580d/bdbae548-eefc-4e59-8053-f4b8e232580d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 825.307710] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dee8ed41-0bf0-49da-b63c-1b8fa3860b59 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.318147] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 825.318147] env[65503]: value = "task-4450016" [ 825.318147] env[65503]: _type = "Task" [ 825.318147] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.331988] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450016, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.429512] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450014, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071995} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.431669] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 825.434879] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94853d2b-d102-4f33-897e-ae69aaf88731 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.461414] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] e74fe378-737a-4732-9a2d-b889a436b8a3/e74fe378-737a-4732-9a2d-b889a436b8a3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 825.464955] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed39e7b8-53d1-4cf9-a146-dba4ed8f4630 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.493991] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 825.493991] env[65503]: value = "task-4450017" [ 825.493991] env[65503]: _type = "Task" [ 825.493991] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.507548] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450017, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.528545] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450015, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.665131] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.735806] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d9f455-d9c5-43c1-bae6-8f9c5fe4dd06 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.746285] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5641987e-eb55-4750-83d5-1aa85ac1c1a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.806633] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131a0fe5-a9ba-45ec-8880-19a74e786814 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.819300] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25734d5-ae45-42aa-84e0-9a522a080a12 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.851541] env[65503]: DEBUG nova.compute.provider_tree [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.858577] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450016, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.007159] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450017, 'name': ReconfigVM_Task, 'duration_secs': 0.34118} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.007159] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Reconfigured VM instance instance-00000037 to attach disk [datastore2] e74fe378-737a-4732-9a2d-b889a436b8a3/e74fe378-737a-4732-9a2d-b889a436b8a3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 826.007159] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d773eb9c-bce7-49f6-8cae-1cd480f8ea88 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.016477] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 826.016477] env[65503]: value = "task-4450018" [ 826.016477] env[65503]: _type = "Task" [ 826.016477] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.027545] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450018, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.031071] env[65503]: DEBUG oslo_vmware.api [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450015, 'name': PowerOnVM_Task, 'duration_secs': 0.536347} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.032047] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 826.032047] env[65503]: INFO nova.compute.manager [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Took 9.69 seconds to spawn the instance on the hypervisor. [ 826.032047] env[65503]: DEBUG nova.compute.manager [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 826.032588] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5568fc2-5f40-4929-be4f-48637c5928e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.172073] env[65503]: DEBUG oslo_vmware.api [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450004, 'name': ReconfigVM_Task, 'duration_secs': 5.917997} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.172073] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.172073] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Reconfigured VM to detach interface {{(pid=65503) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 826.172073] env[65503]: WARNING neutronclient.v2_0.client [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 826.172073] env[65503]: WARNING neutronclient.v2_0.client [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 826.172412] env[65503]: WARNING openstack [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 826.172719] env[65503]: WARNING openstack [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 826.339860] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450016, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67816} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.340328] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] bdbae548-eefc-4e59-8053-f4b8e232580d/bdbae548-eefc-4e59-8053-f4b8e232580d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 826.340559] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 826.340861] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2fabe0d-1e52-49c2-b936-d69e06878cef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.349497] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 826.349497] env[65503]: value = "task-4450019" [ 826.349497] env[65503]: _type = "Task" [ 826.349497] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.361332] env[65503]: DEBUG nova.scheduler.client.report [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 826.365949] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450019, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.528908] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450018, 'name': Rename_Task, 'duration_secs': 0.140344} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.529875] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 826.530156] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12339927-3a97-4760-9d04-2a200fee75ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.540358] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 826.540358] env[65503]: value = "task-4450020" [ 826.540358] env[65503]: _type = "Task" [ 826.540358] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.555428] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450020, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.556548] env[65503]: INFO nova.compute.manager [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Took 45.58 seconds to build instance. 
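Note on the task entries above: the Rename_Task and PowerOnVM_Task lines come from oslo.vmware's task helper, where wait_for_task() (api.py:397 in these traces) blocks the caller while _poll_task() re-reads the task state and emits the "progress is N%" and "completed successfully" records. A minimal sketch of that pattern follows, assuming the public oslo.vmware session API; the vCenter host, credentials, managed-object reference and new name are illustrative placeholders, not values taken from this log.

    from oslo_vmware import api as vmware_api

    def rename_and_power_on(vm_ref, new_name):
        # Illustrative connection values only; the real driver builds the
        # session from nova.conf ([vmware] host_ip/host_username/host_password).
        session = vmware_api.VMwareAPISession(
            'vcenter.example.invalid', 'administrator', 'secret',
            api_retry_count=10, task_poll_interval=0.5)

        # Each invoke_api() call issues one SOAP request (the
        # "Invoking VirtualMachine.Rename_Task ..." lines above) and returns
        # a task reference; wait_for_task() then polls it until vCenter
        # reports success or raises on error.
        task = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                                  newName=new_name)
        session.wait_for_task(task)

        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)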
[ 826.629104] env[65503]: DEBUG oslo_concurrency.lockutils [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.629500] env[65503]: DEBUG oslo_concurrency.lockutils [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.629827] env[65503]: DEBUG oslo_concurrency.lockutils [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.630035] env[65503]: DEBUG oslo_concurrency.lockutils [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.630526] env[65503]: DEBUG oslo_concurrency.lockutils [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.634414] env[65503]: INFO nova.compute.manager [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Terminating instance [ 826.862314] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450019, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071419} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.862688] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 826.863730] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62aa932e-e6a0-4a64-a711-9b27013f4bb3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.867929] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.853s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.868401] env[65503]: DEBUG nova.compute.manager [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 826.871340] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.602s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.872812] env[65503]: INFO nova.compute.claims [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.913402] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] bdbae548-eefc-4e59-8053-f4b8e232580d/bdbae548-eefc-4e59-8053-f4b8e232580d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.914411] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c0ddbe7-a10a-4bba-96a1-dd2e91bd7caa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.948806] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 826.948806] env[65503]: value = "task-4450021" [ 826.948806] env[65503]: _type = "Task" [ 826.948806] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.962629] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450021, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.966340] env[65503]: DEBUG nova.compute.manager [req-c82c06dd-c35a-46ab-a5f3-c9c0c1013f75 req-12ebfddf-bd51-438c-b456-91981269174a service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Received event network-vif-deleted-859e5cfc-c759-44b5-9f3e-fc7a2532d1aa {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 826.966459] env[65503]: INFO nova.compute.manager [req-c82c06dd-c35a-46ab-a5f3-c9c0c1013f75 req-12ebfddf-bd51-438c-b456-91981269174a service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Neutron deleted interface 859e5cfc-c759-44b5-9f3e-fc7a2532d1aa; detaching it from the instance and deleting it from the info cache [ 826.966710] env[65503]: DEBUG nova.network.neutron [req-c82c06dd-c35a-46ab-a5f3-c9c0c1013f75 req-12ebfddf-bd51-438c-b456-91981269174a service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Updating instance_info_cache with network_info: [{"id": "f191aa02-3240-4647-9358-ee80ef3eb29d", "address": "fa:16:3e:b7:65:3a", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf191aa02-32", "ovs_interfaceid": "f191aa02-3240-4647-9358-ee80ef3eb29d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 827.053442] env[65503]: DEBUG oslo_vmware.api [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450020, 'name': PowerOnVM_Task, 'duration_secs': 0.462205} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.053976] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 827.053976] env[65503]: DEBUG nova.compute.manager [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 827.055938] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23864da8-ae5c-4b7e-bd7f-c4e9e0c5ae37 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.058630] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2060296c-774d-42e5-87b4-4807d4b00188 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.100s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.139505] env[65503]: DEBUG nova.compute.manager [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 827.140609] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 827.141945] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed2c7fc-e49d-4d8f-b84b-8caad917c8dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.156089] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 827.156391] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1d57112-36c9-4dc0-83c8-8932fbd69a06 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.168224] env[65503]: DEBUG oslo_vmware.api [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 827.168224] env[65503]: value = "task-4450022" [ 827.168224] env[65503]: _type = "Task" [ 827.168224] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.187805] env[65503]: DEBUG oslo_vmware.api [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4450022, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.380707] env[65503]: DEBUG nova.compute.utils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 827.386035] env[65503]: DEBUG nova.compute.manager [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 827.386035] env[65503]: DEBUG nova.network.neutron [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 827.386035] env[65503]: WARNING neutronclient.v2_0.client [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 827.386035] env[65503]: WARNING neutronclient.v2_0.client [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 827.387612] env[65503]: WARNING openstack [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 827.387612] env[65503]: WARNING openstack [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 827.460225] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450021, 'name': ReconfigVM_Task, 'duration_secs': 0.462136} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.461303] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Reconfigured VM instance instance-00000038 to attach disk [datastore1] bdbae548-eefc-4e59-8053-f4b8e232580d/bdbae548-eefc-4e59-8053-f4b8e232580d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.461303] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f7821b9-3392-41e6-8eaa-8444111b1e42 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.468898] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 827.468898] env[65503]: value = "task-4450023" [ 827.468898] env[65503]: _type = "Task" [ 827.468898] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.472489] env[65503]: DEBUG oslo_concurrency.lockutils [req-c82c06dd-c35a-46ab-a5f3-c9c0c1013f75 req-12ebfddf-bd51-438c-b456-91981269174a service nova] Acquiring lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.472675] env[65503]: DEBUG oslo_concurrency.lockutils [req-c82c06dd-c35a-46ab-a5f3-c9c0c1013f75 req-12ebfddf-bd51-438c-b456-91981269174a service nova] Acquired lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.473453] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c1a93a-5d97-4c34-9d0c-92ebf14dca12 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.484022] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450023, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.496794] env[65503]: DEBUG oslo_concurrency.lockutils [req-c82c06dd-c35a-46ab-a5f3-c9c0c1013f75 req-12ebfddf-bd51-438c-b456-91981269174a service nova] Releasing lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.497109] env[65503]: WARNING nova.compute.manager [req-c82c06dd-c35a-46ab-a5f3-c9c0c1013f75 req-12ebfddf-bd51-438c-b456-91981269174a service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Detach interface failed, port_id=859e5cfc-c759-44b5-9f3e-fc7a2532d1aa, reason: No device with interface-id 859e5cfc-c759-44b5-9f3e-fc7a2532d1aa exists on VM: nova.exception.NotFound: No device with interface-id 859e5cfc-c759-44b5-9f3e-fc7a2532d1aa exists on VM [ 827.520387] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.520763] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.520949] env[65503]: DEBUG nova.network.neutron [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 827.563592] env[65503]: DEBUG nova.compute.manager [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 827.583709] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.660867] env[65503]: DEBUG nova.policy [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bb5e13cc99b46ec8eb5a6befc982bd1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ecc78e2a9434c05bb07afa8e31e918d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 827.679183] env[65503]: DEBUG oslo_vmware.api [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4450022, 'name': PowerOffVM_Task, 'duration_secs': 0.22509} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.679456] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 827.679596] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 827.679847] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35cc3f0c-61f4-4bd7-97c3-4b084f90c853 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.743082] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 827.743082] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 827.743178] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 
tempest-ListImageFiltersTestJSON-1091573721-project-member] Deleting the datastore file [datastore1] 2ab1cd4b-f2c0-4264-8463-8127a733a1c5 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 827.743404] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f39dd54d-8adb-48ac-8654-3a3d6c371fff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.751814] env[65503]: DEBUG oslo_vmware.api [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for the task: (returnval){ [ 827.751814] env[65503]: value = "task-4450025" [ 827.751814] env[65503]: _type = "Task" [ 827.751814] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.761201] env[65503]: DEBUG oslo_vmware.api [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4450025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.885692] env[65503]: DEBUG nova.compute.manager [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 827.988488] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450023, 'name': Rename_Task, 'duration_secs': 0.347213} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.992259] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 827.992309] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15a9b2ab-c0f8-4c58-a2f1-d54daea9044d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.999862] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 827.999862] env[65503]: value = "task-4450026" [ 827.999862] env[65503]: _type = "Task" [ 827.999862] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.020658] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450026, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.026167] env[65503]: WARNING neutronclient.v2_0.client [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 828.026855] env[65503]: WARNING openstack [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 828.027191] env[65503]: WARNING openstack [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 828.039348] env[65503]: DEBUG nova.network.neutron [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Successfully created port: 2a4e1930-c256-421a-8d7b-a1ec9324152d {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 828.091946] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.266933] env[65503]: DEBUG oslo_vmware.api [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Task: {'id': task-4450025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.361916} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.267233] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 828.267455] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 828.267658] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 828.267870] env[65503]: INFO nova.compute.manager [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 828.268271] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 828.268383] env[65503]: DEBUG nova.compute.manager [-] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 828.268514] env[65503]: DEBUG nova.network.neutron [-] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 828.268841] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 828.269690] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 828.270015] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 828.516918] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450026, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.548576] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b432722d-ea94-4221-86ac-ccb10332059e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.556773] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68414e12-e882-46b5-b400-8b6a978438ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.589228] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7037de22-d103-4245-975f-a7fdc6a7c2d1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.598381] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9976f4-7293-467a-91a5-e243361579f4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.614454] env[65503]: DEBUG nova.compute.provider_tree [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.899201] env[65503]: DEBUG nova.compute.manager [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 828.934849] env[65503]: DEBUG nova.virt.hardware [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 828.935487] env[65503]: DEBUG nova.virt.hardware [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 828.936725] env[65503]: DEBUG nova.virt.hardware [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 828.936725] env[65503]: DEBUG nova.virt.hardware [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 828.936725] env[65503]: DEBUG nova.virt.hardware [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 828.936725] env[65503]: DEBUG nova.virt.hardware [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 828.937362] env[65503]: DEBUG nova.virt.hardware [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 828.940115] env[65503]: DEBUG nova.virt.hardware [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 828.940115] 
env[65503]: DEBUG nova.virt.hardware [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 828.940115] env[65503]: DEBUG nova.virt.hardware [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 828.940115] env[65503]: DEBUG nova.virt.hardware [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 828.940115] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c61f268-a6f1-430a-aea5-eb594807f181 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.945731] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 828.954667] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56aace27-5f43-4c4c-8e21-53fd4029964b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.980883] env[65503]: WARNING openstack [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 828.981921] env[65503]: WARNING openstack [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 829.015633] env[65503]: DEBUG oslo_vmware.api [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450026, 'name': PowerOnVM_Task, 'duration_secs': 0.783989} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.016041] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 829.016300] env[65503]: INFO nova.compute.manager [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Took 9.86 seconds to spawn the instance on the hypervisor. [ 829.016519] env[65503]: DEBUG nova.compute.manager [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 829.017404] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9877eac-b480-4db6-829f-052efe972a23 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.118424] env[65503]: DEBUG nova.scheduler.client.report [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 829.334829] env[65503]: WARNING neutronclient.v2_0.client [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
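Note on the "Inventory has not changed for provider 988ff85a-..." entries above: the report client is comparing the provider's current inventory against placement. For each resource class, the capacity placement can actually allocate works out to (total - reserved) * allocation_ratio. A small illustration using the values from these log records; the helper name is made up for the example and is not part of Nova or placement.

    # Inventory payload copied from the report-client entries above,
    # trimmed to the fields the calculation needs.
    INVENTORY = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inventory):
        # capacity = (total - reserved) * allocation_ratio, per resource class
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inventory.items()}

    print(effective_capacity(INVENTORY))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}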
[ 829.335535] env[65503]: WARNING openstack [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 829.335875] env[65503]: WARNING openstack [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 829.462743] env[65503]: DEBUG nova.network.neutron [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Updating instance_info_cache with network_info: [{"id": "f191aa02-3240-4647-9358-ee80ef3eb29d", "address": "fa:16:3e:b7:65:3a", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf191aa02-32", "ovs_interfaceid": "f191aa02-3240-4647-9358-ee80ef3eb29d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 829.539975] env[65503]: INFO nova.compute.manager [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Took 48.57 seconds to build instance. 
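Note on the instance_info_cache update above: each entry in the cached network_info list describes one VIF, with the fixed and floating addresses nested under network -> subnets -> ips. The sketch below walks that nesting on a plain dict copy of the structure; inside Nova this data is wrapped in nova.network.model objects with their own accessors, and the extract_addresses helper here is invented for the example.

    # One VIF entry shaped like the cache update above, trimmed to the
    # fields the example reads.
    vif = {
        "id": "f191aa02-3240-4647-9358-ee80ef3eb29d",
        "address": "fa:16:3e:b7:65:3a",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.4",
                    "type": "fixed",
                    "floating_ips": [{"address": "10.180.180.248",
                                      "type": "floating"}],
                }],
            }],
        },
    }

    def extract_addresses(vif):
        # Collect fixed IPs and any floating IPs attached to them.
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"]
                                for f in ip.get("floating_ips", []))
        return fixed, floating

    print(extract_addresses(vif))   # (['192.168.128.4'], ['10.180.180.248'])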
[ 829.624600] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.753s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.625572] env[65503]: DEBUG nova.compute.manager [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 829.628538] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.414s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.628538] env[65503]: DEBUG nova.objects.instance [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lazy-loading 'resources' on Instance uuid ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 829.725310] env[65503]: DEBUG nova.network.neutron [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Successfully updated port: 2a4e1930-c256-421a-8d7b-a1ec9324152d {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 829.735963] env[65503]: DEBUG nova.network.neutron [-] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 829.966842] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-a197b590-1f74-4241-9579-2f2d3bb89a1d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.043413] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7019a06d-3c88-4515-b902-88f729f72574 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bdbae548-eefc-4e59-8053-f4b8e232580d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.086s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.092756] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.093019] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.093228] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.093488] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.093656] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.096207] env[65503]: INFO nova.compute.manager [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Terminating instance [ 830.121557] env[65503]: DEBUG nova.compute.manager [req-7012983b-d466-4469-9f37-7bb574b3e0b6 req-84c0a2ae-b2e5-406a-8897-8b31ab6a003b service nova] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Received event network-vif-deleted-3cc93e6f-5b65-4d1b-8860-439f6ed2b3b3 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 830.134886] env[65503]: DEBUG nova.compute.utils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 830.137113] env[65503]: DEBUG nova.compute.manager [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 830.137113] env[65503]: DEBUG nova.network.neutron [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 830.137113] env[65503]: WARNING neutronclient.v2_0.client [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 830.137384] env[65503]: WARNING neutronclient.v2_0.client [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 830.137790] env[65503]: WARNING openstack [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 830.138130] env[65503]: WARNING openstack [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 830.186687] env[65503]: DEBUG nova.compute.manager [req-2fe5c8dc-493e-477c-ae00-ef0fb0995982 req-55aebc23-16e5-44b7-a237-edbece474dcb service nova] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Received event network-vif-plugged-2a4e1930-c256-421a-8d7b-a1ec9324152d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 830.186897] env[65503]: DEBUG oslo_concurrency.lockutils [req-2fe5c8dc-493e-477c-ae00-ef0fb0995982 req-55aebc23-16e5-44b7-a237-edbece474dcb service nova] Acquiring lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.187301] env[65503]: DEBUG oslo_concurrency.lockutils [req-2fe5c8dc-493e-477c-ae00-ef0fb0995982 req-55aebc23-16e5-44b7-a237-edbece474dcb service nova] Lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.187301] env[65503]: DEBUG oslo_concurrency.lockutils [req-2fe5c8dc-493e-477c-ae00-ef0fb0995982 req-55aebc23-16e5-44b7-a237-edbece474dcb service nova] Lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.187693] env[65503]: DEBUG nova.compute.manager [req-2fe5c8dc-493e-477c-ae00-ef0fb0995982 req-55aebc23-16e5-44b7-a237-edbece474dcb service nova] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] No waiting events found dispatching network-vif-plugged-2a4e1930-c256-421a-8d7b-a1ec9324152d {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 830.187693] env[65503]: WARNING nova.compute.manager [req-2fe5c8dc-493e-477c-ae00-ef0fb0995982 req-55aebc23-16e5-44b7-a237-edbece474dcb service nova] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Received unexpected event network-vif-plugged-2a4e1930-c256-421a-8d7b-a1ec9324152d for instance with vm_state building and task_state spawning. [ 830.227967] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "refresh_cache-bcd845e2-5a89-4eef-bb76-33d69834bbc1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.228174] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired lock "refresh_cache-bcd845e2-5a89-4eef-bb76-33d69834bbc1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.228334] env[65503]: DEBUG nova.network.neutron [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 830.234830] env[65503]: DEBUG nova.policy [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bb5e13cc99b46ec8eb5a6befc982bd1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ecc78e2a9434c05bb07afa8e31e918d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 830.240680] env[65503]: INFO nova.compute.manager [-] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Took 1.97 seconds to deallocate network for instance. 
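
The records above show the compute manager coordinating with Neutron through per-instance "external events": a `network-vif-plugged-<port-id>` notification is dispatched via `InstanceEvents.pop_instance_event`, and when no waiter has been registered yet it is logged as an unexpected event for an instance still in `vm_state building` / `task_state spawning`, which is benign during concurrent builds. The sketch below is a minimal illustration of that waiter pattern using only the standard library; the names (`SimpleInstanceEvents`, `prepare`, `pop`) are hypothetical and are not Nova's actual API.

```python
# Minimal sketch of an external-event waiter registry, assuming a
# plug-notification flow like the one visible in the log above.
# All names here are invented for illustration, not Nova's API.
import threading


class SimpleInstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        # {(instance_uuid, event_name): threading.Event}
        self._waiters = {}

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before triggering the external action."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        """Dispatch an incoming event; returns False if nobody was waiting."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Corresponds to the "Received unexpected event ..." warning in the
            # log: the notification arrived before (or without) a registered waiter.
            return False
        ev.set()
        return True


# Usage: the build path registers the waiter first, then plugs the VIF and
# waits; the notification path calls pop() when the port becomes active.
events = SimpleInstanceEvents()
waiter = events.prepare("bcd845e2-...", "network-vif-plugged-2a4e1930-...")
# ... plug the interface here ...
events.pop("bcd845e2-...", "network-vif-plugged-2a4e1930-...")
waiter.wait(timeout=300)
```
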
[ 830.406991] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.407128] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.408029] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "a197b590-1f74-4241-9579-2f2d3bb89a1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.408029] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "a197b590-1f74-4241-9579-2f2d3bb89a1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.408029] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "a197b590-1f74-4241-9579-2f2d3bb89a1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.411347] env[65503]: INFO nova.compute.manager [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Terminating instance [ 830.475594] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c161f4d2-c837-4668-9122-a3ba0b022fae tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-a197b590-1f74-4241-9579-2f2d3bb89a1d-859e5cfc-c759-44b5-9f3e-fc7a2532d1aa" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.944s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.493229] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.493229] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.550571] env[65503]: DEBUG nova.compute.manager [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 830.601384] env[65503]: DEBUG nova.compute.manager [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 830.602447] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 830.602904] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dee7418-6ee9-46fd-9e00-9db073cd7922 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.618742] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 830.619437] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce9e75c4-e8d4-4b56-8d80-e57c50d7372a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.627966] env[65503]: DEBUG oslo_vmware.api [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 830.627966] env[65503]: value = "task-4450027" [ 830.627966] env[65503]: _type = "Task" [ 830.627966] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.642428] env[65503]: DEBUG oslo_vmware.api [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450027, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.646102] env[65503]: DEBUG nova.compute.manager [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 830.668681] env[65503]: DEBUG nova.network.neutron [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Successfully created port: 46f89336-5fb0-4852-bdd4-5f314fbda4f9 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 830.732240] env[65503]: WARNING openstack [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 830.732733] env[65503]: WARNING openstack [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 830.750227] env[65503]: DEBUG oslo_concurrency.lockutils [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.779282] env[65503]: DEBUG nova.network.neutron [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 830.840727] env[65503]: WARNING openstack [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 830.841117] env[65503]: WARNING openstack [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 830.851138] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb67acf-e20a-4728-a2b0-baf9822d65e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.860942] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fbae4f-0486-422e-aa0c-a4b440020341 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.899068] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde8d1ce-00b1-4c94-a5fa-d58fdc918dd9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.907386] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd089cfb-018b-4ff0-8a22-b5f30af10c71 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.928599] env[65503]: DEBUG nova.compute.manager [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 830.928829] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 830.929424] env[65503]: DEBUG nova.compute.provider_tree [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.931643] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0289182a-6405-4e75-8506-653ef1082a3a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.941029] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 830.941273] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d2f3b55-84bf-4279-88cf-bea74ba80474 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.946945] env[65503]: WARNING neutronclient.v2_0.client [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 830.947906] env[65503]: WARNING openstack [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 830.948376] env[65503]: WARNING openstack [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 830.961617] env[65503]: DEBUG oslo_vmware.api [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 830.961617] env[65503]: value = "task-4450028" [ 830.961617] env[65503]: _type = "Task" [ 830.961617] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.971363] env[65503]: DEBUG oslo_vmware.api [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450028, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.981086] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquiring lock "e74fe378-737a-4732-9a2d-b889a436b8a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.981086] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock "e74fe378-737a-4732-9a2d-b889a436b8a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.981613] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquiring lock "e74fe378-737a-4732-9a2d-b889a436b8a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.981613] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock "e74fe378-737a-4732-9a2d-b889a436b8a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.981708] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock "e74fe378-737a-4732-9a2d-b889a436b8a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.984215] env[65503]: INFO nova.compute.manager [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Terminating instance [ 831.078694] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.086279] env[65503]: DEBUG nova.network.neutron [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 
tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Updating instance_info_cache with network_info: [{"id": "2a4e1930-c256-421a-8d7b-a1ec9324152d", "address": "fa:16:3e:8b:9d:ea", "network": {"id": "0b0ccb7a-8f7a-4d6c-a69d-9813e51cb43f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1056086899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecc78e2a9434c05bb07afa8e31e918d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a4e1930-c2", "ovs_interfaceid": "2a4e1930-c256-421a-8d7b-a1ec9324152d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 831.140690] env[65503]: DEBUG oslo_vmware.api [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450027, 'name': PowerOffVM_Task, 'duration_secs': 0.227212} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.140690] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 831.140918] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 831.141052] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44d67c5e-5c38-463a-a3d7-e7229b0894d1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.215056] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 831.215461] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 831.215723] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Deleting the datastore file [datastore1] e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 831.216692] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38ffb29b-6146-46ab-8fa0-b01b69856122 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.224226] env[65503]: DEBUG oslo_vmware.api [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 831.224226] env[65503]: value = "task-4450030" [ 831.224226] env[65503]: _type = "Task" [ 831.224226] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.236479] env[65503]: DEBUG oslo_vmware.api [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450030, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.436821] env[65503]: DEBUG nova.scheduler.client.report [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 831.472664] env[65503]: DEBUG oslo_vmware.api [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450028, 'name': PowerOffVM_Task, 'duration_secs': 0.186661} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.472979] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 831.472979] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 831.473210] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ade290ed-9444-41c6-9bcd-af69bb3d7273 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.491692] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquiring lock "refresh_cache-e74fe378-737a-4732-9a2d-b889a436b8a3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.491899] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquired lock "refresh_cache-e74fe378-737a-4732-9a2d-b889a436b8a3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.492304] env[65503]: DEBUG nova.network.neutron [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 831.534164] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 
tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 831.534460] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 831.534776] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Deleting the datastore file [datastore2] a197b590-1f74-4241-9579-2f2d3bb89a1d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 831.535364] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60686e95-6837-45d2-9d34-af521b44b3b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.544841] env[65503]: DEBUG oslo_vmware.api [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 831.544841] env[65503]: value = "task-4450032" [ 831.544841] env[65503]: _type = "Task" [ 831.544841] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.553808] env[65503]: DEBUG oslo_vmware.api [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450032, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.590220] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Releasing lock "refresh_cache-bcd845e2-5a89-4eef-bb76-33d69834bbc1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.590220] env[65503]: DEBUG nova.compute.manager [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Instance network_info: |[{"id": "2a4e1930-c256-421a-8d7b-a1ec9324152d", "address": "fa:16:3e:8b:9d:ea", "network": {"id": "0b0ccb7a-8f7a-4d6c-a69d-9813e51cb43f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1056086899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecc78e2a9434c05bb07afa8e31e918d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a4e1930-c2", "ovs_interfaceid": "2a4e1930-c256-421a-8d7b-a1ec9324152d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 831.590636] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:9d:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a4e1930-c256-421a-8d7b-a1ec9324152d', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.598237] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 831.598490] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.598724] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b253ead7-78f8-4d23-9797-898bb27481aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.621784] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.621784] env[65503]: value = "task-4450033" [ 831.621784] env[65503]: _type = "Task" [ 831.621784] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.631848] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450033, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.657148] env[65503]: DEBUG nova.compute.manager [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 831.694136] env[65503]: DEBUG nova.virt.hardware [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 831.698090] env[65503]: DEBUG nova.virt.hardware [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.698090] env[65503]: DEBUG nova.virt.hardware [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 831.698090] env[65503]: DEBUG nova.virt.hardware [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.698090] 
env[65503]: DEBUG nova.virt.hardware [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 831.698090] env[65503]: DEBUG nova.virt.hardware [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 831.698090] env[65503]: DEBUG nova.virt.hardware [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 831.698090] env[65503]: DEBUG nova.virt.hardware [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 831.698090] env[65503]: DEBUG nova.virt.hardware [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 831.698090] env[65503]: DEBUG nova.virt.hardware [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 831.698090] env[65503]: DEBUG nova.virt.hardware [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 831.698090] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d25d32-8128-4424-abdd-1cf80308231c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.710408] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233aba59-1b15-40b3-88e1-43925ad457fe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.742115] env[65503]: DEBUG oslo_vmware.api [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450030, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194121} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.742472] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 831.742692] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 831.743295] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 831.743295] env[65503]: INFO nova.compute.manager [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Took 1.14 seconds to destroy the instance on the hypervisor. [ 831.744211] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 831.744211] env[65503]: DEBUG nova.compute.manager [-] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 831.744211] env[65503]: DEBUG nova.network.neutron [-] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 831.744211] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 831.744832] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 831.745433] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 831.828637] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
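
Throughout these records the VMware driver submits asynchronous vCenter operations (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, CreateVM_Task) and the oslo.vmware API layer polls them via `wait_for_task` / `_poll_task`, logging intermediate progress and a final `duration_secs`. The snippet below is a generic poll-until-complete loop written against a hypothetical `get_task_info()` callable; it only illustrates the pattern seen in the log and is not the oslo.vmware implementation or its real API.

```python
# Generic "submit then poll" loop mirroring the wait_for_task/_poll_task
# pattern in the log. get_task_info is a hypothetical callable returning
# (state, progress, error); it stands in for a real vCenter property read.
import time


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    deadline = time.monotonic() + timeout
    while True:
        state, progress, error = get_task_info()
        if state == "success":
            return True
        if state == "error":
            raise RuntimeError(f"task failed: {error}")
        if time.monotonic() >= deadline:
            raise TimeoutError("task did not complete in time")
        # The log's "progress is 0%." lines correspond to this kind of poll.
        print(f"progress is {progress}%")
        time.sleep(poll_interval)


# Example with a fake task that completes after three polls.
_calls = {"n": 0}

def fake_task_info():
    _calls["n"] += 1
    state = "success" if _calls["n"] >= 3 else "running"
    return (state, 33 * _calls["n"], None)

wait_for_task(fake_task_info, poll_interval=0.01)
```
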
[ 831.943100] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.315s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.945644] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.547s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.945917] env[65503]: DEBUG nova.objects.instance [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 831.971706] env[65503]: INFO nova.scheduler.client.report [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Deleted allocations for instance ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4 [ 831.995041] env[65503]: WARNING neutronclient.v2_0.client [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 831.995787] env[65503]: WARNING openstack [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 831.996168] env[65503]: WARNING openstack [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 832.056211] env[65503]: DEBUG oslo_vmware.api [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450032, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164869} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.059059] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 832.059059] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 832.059059] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 832.059059] env[65503]: INFO nova.compute.manager [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 832.059059] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 832.059059] env[65503]: DEBUG nova.compute.manager [-] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 832.059059] env[65503]: DEBUG nova.network.neutron [-] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 832.059059] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 832.059059] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 832.059059] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 832.134659] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450033, 'name': CreateVM_Task, 'duration_secs': 0.386848} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.134659] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.135245] env[65503]: WARNING neutronclient.v2_0.client [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 832.135703] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.135901] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.136296] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 832.136957] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c17417f4-6fbb-4206-abb5-15c1c0b055a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.142862] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 832.142862] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa3343-c71b-6578-8f2f-18b9288ba8a2" [ 832.142862] env[65503]: _type = "Task" [ 832.142862] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.154494] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa3343-c71b-6578-8f2f-18b9288ba8a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.323428] env[65503]: DEBUG nova.network.neutron [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 832.341767] env[65503]: DEBUG nova.network.neutron [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Successfully updated port: 46f89336-5fb0-4852-bdd4-5f314fbda4f9 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 832.359621] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 832.484612] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b5671b99-1afb-40dc-aefd-ecd884bf109f tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.015s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.485566] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 28.794s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.485783] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.485978] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.486148] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.487820] env[65503]: INFO nova.compute.manager [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Terminating instance [ 832.499784] env[65503]: DEBUG nova.network.neutron [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 
tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 832.592435] env[65503]: DEBUG nova.network.neutron [-] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 832.658018] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa3343-c71b-6578-8f2f-18b9288ba8a2, 'name': SearchDatastore_Task, 'duration_secs': 0.019844} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.663022] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.663022] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.663022] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.663022] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.663022] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.663022] env[65503]: DEBUG nova.compute.manager [req-19bcd75a-fdb7-417d-977a-1eeea1f082f1 req-c8ef1d42-ad94-480e-9fff-41175a696d4c service nova] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Received event network-vif-plugged-46f89336-5fb0-4852-bdd4-5f314fbda4f9 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 832.663022] env[65503]: DEBUG oslo_concurrency.lockutils [req-19bcd75a-fdb7-417d-977a-1eeea1f082f1 req-c8ef1d42-ad94-480e-9fff-41175a696d4c service nova] 
Acquiring lock "972a50ed-759a-4312-9314-9bf01a03fc3a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.663022] env[65503]: DEBUG oslo_concurrency.lockutils [req-19bcd75a-fdb7-417d-977a-1eeea1f082f1 req-c8ef1d42-ad94-480e-9fff-41175a696d4c service nova] Lock "972a50ed-759a-4312-9314-9bf01a03fc3a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.663022] env[65503]: DEBUG oslo_concurrency.lockutils [req-19bcd75a-fdb7-417d-977a-1eeea1f082f1 req-c8ef1d42-ad94-480e-9fff-41175a696d4c service nova] Lock "972a50ed-759a-4312-9314-9bf01a03fc3a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.663022] env[65503]: DEBUG nova.compute.manager [req-19bcd75a-fdb7-417d-977a-1eeea1f082f1 req-c8ef1d42-ad94-480e-9fff-41175a696d4c service nova] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] No waiting events found dispatching network-vif-plugged-46f89336-5fb0-4852-bdd4-5f314fbda4f9 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 832.663022] env[65503]: WARNING nova.compute.manager [req-19bcd75a-fdb7-417d-977a-1eeea1f082f1 req-c8ef1d42-ad94-480e-9fff-41175a696d4c service nova] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Received unexpected event network-vif-plugged-46f89336-5fb0-4852-bdd4-5f314fbda4f9 for instance with vm_state building and task_state spawning. [ 832.664541] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d380ae80-a08b-4d45-9289-de4b5919dab0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.679304] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.679465] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 832.683387] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3d8d4e8-3508-43c5-9b2d-52cf57e27ee8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.691415] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 832.691415] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214df1b-c287-90ea-c9ab-38c6f7d4bcc1" [ 832.691415] env[65503]: _type = "Task" [ 832.691415] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.702604] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214df1b-c287-90ea-c9ab-38c6f7d4bcc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.714303] env[65503]: DEBUG nova.compute.manager [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Received event network-changed-2a4e1930-c256-421a-8d7b-a1ec9324152d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 832.714447] env[65503]: DEBUG nova.compute.manager [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Refreshing instance network info cache due to event network-changed-2a4e1930-c256-421a-8d7b-a1ec9324152d. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 832.715020] env[65503]: DEBUG oslo_concurrency.lockutils [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] Acquiring lock "refresh_cache-bcd845e2-5a89-4eef-bb76-33d69834bbc1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.715020] env[65503]: DEBUG oslo_concurrency.lockutils [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] Acquired lock "refresh_cache-bcd845e2-5a89-4eef-bb76-33d69834bbc1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.715020] env[65503]: DEBUG nova.network.neutron [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Refreshing network info cache for port 2a4e1930-c256-421a-8d7b-a1ec9324152d {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 832.844312] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "refresh_cache-972a50ed-759a-4312-9314-9bf01a03fc3a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.844532] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired lock "refresh_cache-972a50ed-759a-4312-9314-9bf01a03fc3a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.844691] env[65503]: DEBUG nova.network.neutron [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 832.956832] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb29bf7d-696a-417f-8629-d3bcfa26eb6a 
tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.958514] env[65503]: DEBUG oslo_concurrency.lockutils [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.500s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.958634] env[65503]: DEBUG nova.objects.instance [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lazy-loading 'resources' on Instance uuid 1bda7a65-0231-4753-9762-43e9b13bd893 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 832.991848] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.992040] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquired lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.992549] env[65503]: DEBUG nova.network.neutron [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 833.002294] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Releasing lock "refresh_cache-e74fe378-737a-4732-9a2d-b889a436b8a3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.002711] env[65503]: DEBUG nova.compute.manager [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 833.002894] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 833.003889] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4879af-8b08-4502-90db-6631919b6f8a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.014700] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 833.014700] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-adcaa64c-eb86-4875-8be8-fbc5b665c37d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.021888] env[65503]: DEBUG oslo_vmware.api [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 833.021888] env[65503]: value = "task-4450034" [ 833.021888] env[65503]: _type = "Task" [ 833.021888] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.032429] env[65503]: DEBUG oslo_vmware.api [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450034, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.096737] env[65503]: INFO nova.compute.manager [-] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Took 1.35 seconds to deallocate network for instance. [ 833.209342] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214df1b-c287-90ea-c9ab-38c6f7d4bcc1, 'name': SearchDatastore_Task, 'duration_secs': 0.009888} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.210852] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b02b36d9-eea4-4853-8a22-4c5b8e0c995a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.218363] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 833.218363] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52943212-2eb2-a7ad-c2d2-e76f86bdb6a3" [ 833.218363] env[65503]: _type = "Task" [ 833.218363] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.223744] env[65503]: WARNING neutronclient.v2_0.client [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 833.224461] env[65503]: WARNING openstack [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 833.224808] env[65503]: WARNING openstack [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 833.240263] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52943212-2eb2-a7ad-c2d2-e76f86bdb6a3, 'name': SearchDatastore_Task, 'duration_secs': 0.011448} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.240556] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.240818] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] bcd845e2-5a89-4eef-bb76-33d69834bbc1/bcd845e2-5a89-4eef-bb76-33d69834bbc1.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 833.241169] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60a19f7e-976a-4380-9019-2f4174d13989 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.249501] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 833.249501] env[65503]: value = "task-4450035" [ 833.249501] env[65503]: _type = "Task" [ 833.249501] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.258522] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450035, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.338874] env[65503]: DEBUG nova.network.neutron [-] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 833.350893] env[65503]: WARNING openstack [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 833.350893] env[65503]: WARNING openstack [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 833.399600] env[65503]: DEBUG nova.network.neutron [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 833.417473] env[65503]: WARNING openstack [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 833.417974] env[65503]: WARNING openstack [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 833.436708] env[65503]: WARNING openstack [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 833.436708] env[65503]: WARNING openstack [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 833.496509] env[65503]: DEBUG nova.compute.utils [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Can not refresh info_cache because instance was not found {{(pid=65503) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 833.496509] env[65503]: WARNING neutronclient.v2_0.client [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
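The repeated "Disabling service 'block-storage' / 'key-manager'" warnings in this log are openstacksdk reporting that it could not read `valid_interfaces` from the `[cinder]` and `[barbican]` config groups, which oslo.config surfaces as `NoSuchOptError`. A minimal, self-contained sketch of that failure mode follows; the group registration and attribute access here are illustrative only and do not reproduce Nova's or the SDK's actual config wiring:

```python
# Illustrative sketch only: trigger oslo.config's NoSuchOptError for an option
# that was never registered in a group, the error quoted in the warnings above.
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))  # the group exists...
conf(args=[])                                # ...but 'valid_interfaces' was never registered in it

try:
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print(exc)  # "no such option valid_interfaces in group [cinder]"
```

When the SDK hits this while building a client for a service, it logs the warning seen here and skips that service rather than failing the whole process.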
[ 833.496820] env[65503]: WARNING openstack [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 833.497181] env[65503]: WARNING openstack [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 833.538042] env[65503]: DEBUG oslo_vmware.api [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450034, 'name': PowerOffVM_Task, 'duration_secs': 0.191508} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.539502] env[65503]: DEBUG nova.network.neutron [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 833.541383] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 833.541841] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 833.541934] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b45e998-45bc-438a-bfd0-c74ff661b422 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.567959] env[65503]: WARNING neutronclient.v2_0.client [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
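The SearchDatastore_Task, CopyVirtualDisk_Task, PowerOffVM_Task and DeleteDatastoreFile_Task entries in this section all follow the same oslo.vmware pattern: submit a vCenter task, then poll it until it reaches a terminal state, which produces the "Waiting for the task ... to complete", "progress is 0%" and "completed successfully" lines. The sketch below shows only the shape of that poll loop; `TaskInfo` and `fetch_task_info` are hypothetical stand-ins, not the real oslo.vmware internals:

```python
# Hypothetical sketch of a poll-until-done loop for a vCenter task.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    """Stand-in for a vCenter TaskInfo object (not the real type)."""
    state: str          # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    error: str = ''

def wait_for_task(fetch_task_info, poll_interval=0.5):
    """Poll until the task reports success or error, then return or raise."""
    while True:
        info = fetch_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError(info.error)
        print(f"progress is {info.progress}%")  # analogue of the DEBUG progress lines
        time.sleep(poll_interval)

# Fake task that reports 0% once and then completes, mirroring the log's
# "progress is 0%" followed by "completed successfully."
states = iter([TaskInfo('running', 0), TaskInfo('success', 100)])
wait_for_task(lambda: next(states), poll_interval=0.01)
```

The `duration_secs` values recorded above are simply the elapsed time between task submission and the poll that first observes the terminal state.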
[ 833.568595] env[65503]: WARNING openstack [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 833.569080] env[65503]: WARNING openstack [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 833.579395] env[65503]: WARNING neutronclient.v2_0.client [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 833.580053] env[65503]: WARNING openstack [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 833.580414] env[65503]: WARNING openstack [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 833.590810] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 833.591162] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 833.591222] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Deleting the datastore file [datastore2] e74fe378-737a-4732-9a2d-b889a436b8a3 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.592018] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d9fc6c1-549e-4400-850a-a5087e7ae354 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.600888] env[65503]: DEBUG oslo_vmware.api [None 
req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for the task: (returnval){ [ 833.600888] env[65503]: value = "task-4450037" [ 833.600888] env[65503]: _type = "Task" [ 833.600888] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.607813] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.614680] env[65503]: DEBUG oslo_vmware.api [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450037, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.695919] env[65503]: DEBUG nova.network.neutron [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 833.731600] env[65503]: DEBUG nova.network.neutron [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Updated VIF entry in instance network info cache for port 2a4e1930-c256-421a-8d7b-a1ec9324152d. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 833.732047] env[65503]: DEBUG nova.network.neutron [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Updating instance_info_cache with network_info: [{"id": "2a4e1930-c256-421a-8d7b-a1ec9324152d", "address": "fa:16:3e:8b:9d:ea", "network": {"id": "0b0ccb7a-8f7a-4d6c-a69d-9813e51cb43f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1056086899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecc78e2a9434c05bb07afa8e31e918d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a4e1930-c2", "ovs_interfaceid": "2a4e1930-c256-421a-8d7b-a1ec9324152d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 833.742903] env[65503]: DEBUG nova.network.neutron [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Updating instance_info_cache with network_info: [{"id": "46f89336-5fb0-4852-bdd4-5f314fbda4f9", "address": "fa:16:3e:7d:f8:3f", "network": {"id": "0b0ccb7a-8f7a-4d6c-a69d-9813e51cb43f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1056086899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecc78e2a9434c05bb07afa8e31e918d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46f89336-5f", "ovs_interfaceid": "46f89336-5fb0-4852-bdd4-5f314fbda4f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 833.763129] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450035, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.840885] env[65503]: INFO nova.compute.manager [-] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Took 1.78 seconds to deallocate network for instance. [ 834.092501] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0203275e-f122-4daf-addb-e8ab010328f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.101178] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c372419d-04a5-4daf-b1ff-dde9bbbdff04 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.114300] env[65503]: DEBUG oslo_vmware.api [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Task: {'id': task-4450037, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228149} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.145536] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 834.145665] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 834.145876] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 834.146064] env[65503]: INFO nova.compute.manager [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Took 1.14 seconds to destroy the instance on the hypervisor. [ 834.146391] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 834.148156] env[65503]: DEBUG nova.compute.manager [-] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 834.148156] env[65503]: DEBUG nova.network.neutron [-] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 834.148156] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 834.148403] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 834.149024] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 834.157744] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b999902-07fa-4ad4-a7e3-c6dcbb026c97 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.168167] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55841277-594a-4daa-a72b-a78f25826c78 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.184300] env[65503]: DEBUG nova.compute.provider_tree [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.186357] env[65503]: DEBUG nova.network.neutron [-] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 834.186591] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 834.201432] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Releasing lock "refresh_cache-ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.201872] env[65503]: DEBUG nova.compute.manager [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 834.202076] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.202659] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e26f5342-fd79-4953-85bc-ac30761b75b7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.216087] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebabd9a-b5bc-4fb2-bbfb-3a429e350ae6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.235417] env[65503]: DEBUG oslo_concurrency.lockutils [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] Releasing lock "refresh_cache-bcd845e2-5a89-4eef-bb76-33d69834bbc1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.236448] env[65503]: DEBUG nova.compute.manager [req-62909818-cb8c-480a-bbb6-b19845ae2751 req-9b7db830-c257-4a3e-aa0a-b19234fdfe50 service nova] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Received event network-vif-deleted-d6631e8b-b965-49cf-894f-adee439934ac {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 834.253117] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Releasing lock "refresh_cache-972a50ed-759a-4312-9314-9bf01a03fc3a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.253546] env[65503]: DEBUG nova.compute.manager [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Instance network_info: |[{"id": "46f89336-5fb0-4852-bdd4-5f314fbda4f9", "address": "fa:16:3e:7d:f8:3f", "network": {"id": "0b0ccb7a-8f7a-4d6c-a69d-9813e51cb43f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1056086899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecc78e2a9434c05bb07afa8e31e918d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46f89336-5f", "ovs_interfaceid": "46f89336-5fb0-4852-bdd4-5f314fbda4f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2035}} [ 834.254108] env[65503]: WARNING nova.virt.vmwareapi.vmops [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4 could not be found. [ 834.254289] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 834.254475] env[65503]: INFO nova.compute.manager [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 834.254931] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 834.256273] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:f8:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46f89336-5fb0-4852-bdd4-5f314fbda4f9', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.263557] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 834.266744] env[65503]: DEBUG nova.compute.manager [-] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 834.266856] env[65503]: DEBUG nova.network.neutron [-] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 834.267117] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
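The many 'Acquiring lock ... by ...', 'acquired ... waited N.NNNs' and '"released" ... held N.NNNs' DEBUG lines throughout this section (for "compute_resources", per-instance UUIDs, and "<uuid>-events") come from oslo.concurrency's named-lock helpers, which Nova uses to serialize work on shared state. A small sketch of the decorator form, assuming an in-process (non-external) lock and an illustrative function name rather than Nova's real call sites:

```python
# Illustrative sketch of oslo.concurrency's named-lock decorator; the function
# name is an example, not an actual Nova method.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Only one caller at a time runs this body; the "waited N.NNNs" figures
    # in the log measure time spent blocked here while another caller held the lock.
    print("usage updated under the compute_resources lock")

update_usage()
```

The same helpers also offer a context-manager form, `with lockutils.lock(name): ...`, which is what produces the acquire/release pairs around the refresh_cache and terminate_instance sections above.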
[ 834.267642] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 834.267896] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 834.274802] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 834.276651] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1ca9025-02c8-4677-9db2-e5ea8d079260 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.296027] env[65503]: DEBUG nova.network.neutron [-] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 834.296027] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 834.299460] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450035, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516904} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.299773] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] bcd845e2-5a89-4eef-bb76-33d69834bbc1/bcd845e2-5a89-4eef-bb76-33d69834bbc1.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 834.300035] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 834.300350] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-270b243e-008e-425a-9e60-b48c4fe0bdc4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.306391] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.306391] env[65503]: value = "task-4450038" [ 834.306391] env[65503]: _type = "Task" [ 834.306391] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.312060] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 834.312060] env[65503]: value = "task-4450039" [ 834.312060] env[65503]: _type = "Task" [ 834.312060] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.319804] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450038, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.326745] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450039, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.329840] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "45a4b511-aa6a-433d-b136-f53686db9575" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.330133] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "45a4b511-aa6a-433d-b136-f53686db9575" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.330379] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "45a4b511-aa6a-433d-b136-f53686db9575-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.330595] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "45a4b511-aa6a-433d-b136-f53686db9575-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.330860] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "45a4b511-aa6a-433d-b136-f53686db9575-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
834.333927] env[65503]: INFO nova.compute.manager [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Terminating instance [ 834.351782] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.581612] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquiring lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.581852] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.686184] env[65503]: DEBUG nova.compute.manager [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Received event network-changed-46f89336-5fb0-4852-bdd4-5f314fbda4f9 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 834.686429] env[65503]: DEBUG nova.compute.manager [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Refreshing instance network info cache due to event network-changed-46f89336-5fb0-4852-bdd4-5f314fbda4f9. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 834.686589] env[65503]: DEBUG oslo_concurrency.lockutils [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] Acquiring lock "refresh_cache-972a50ed-759a-4312-9314-9bf01a03fc3a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.686720] env[65503]: DEBUG oslo_concurrency.lockutils [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] Acquired lock "refresh_cache-972a50ed-759a-4312-9314-9bf01a03fc3a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.686879] env[65503]: DEBUG nova.network.neutron [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Refreshing network info cache for port 46f89336-5fb0-4852-bdd4-5f314fbda4f9 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 834.688834] env[65503]: DEBUG nova.scheduler.client.report [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 834.692404] env[65503]: DEBUG nova.network.neutron [-] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 834.743530] env[65503]: DEBUG nova.compute.manager [req-54d5e05f-f57f-4c14-ad68-7d8c98ae4022 req-59df4935-24c4-42f5-bc3d-590f2e4a2701 service nova] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Received event network-vif-deleted-f191aa02-3240-4647-9358-ee80ef3eb29d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 834.795912] env[65503]: DEBUG nova.network.neutron [-] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 834.829434] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450038, 'name': CreateVM_Task, 'duration_secs': 0.398312} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.831862] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 834.832169] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450039, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074626} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.832653] env[65503]: WARNING neutronclient.v2_0.client [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 834.833031] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.833179] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.833507] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 834.833778] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 834.834067] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01de1bdb-fb07-4336-8d44-f1aabe6f3262 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.836251] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38fbe215-e1ff-4195-87c7-1ba74fcff623 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.838903] env[65503]: DEBUG nova.compute.manager [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 834.839111] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.840600] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb91d58c-8fbb-445a-b202-2abe5c69bd05 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.844291] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 834.844291] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c72cb5-7800-ced8-a576-34c5d22b85b5" [ 834.844291] env[65503]: _type = "Task" [ 834.844291] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.859812] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.868818] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] bcd845e2-5a89-4eef-bb76-33d69834bbc1/bcd845e2-5a89-4eef-bb76-33d69834bbc1.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.869520] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa9ea624-1ce1-4c50-81dc-7ef1c1a000bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.871368] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd3aa2c6-a9b4-440e-ba26-04e0682b268f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.889123] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c72cb5-7800-ced8-a576-34c5d22b85b5, 'name': SearchDatastore_Task, 'duration_secs': 0.01031} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.889970] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.890252] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.890484] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.890632] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.890802] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.891056] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b667b142-7b7d-42b8-81c2-546eb09a2221 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.894719] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 834.894719] env[65503]: value = "task-4450041" [ 834.894719] env[65503]: _type = "Task" [ 834.894719] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.895874] env[65503]: DEBUG oslo_vmware.api [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 834.895874] env[65503]: value = "task-4450040" [ 834.895874] env[65503]: _type = "Task" [ 834.895874] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.902935] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.903238] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.904318] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8149c8f-06a7-4433-876c-9544867d812b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.912598] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450041, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.912872] env[65503]: DEBUG oslo_vmware.api [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4450040, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.916375] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 834.916375] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529cbc79-f39e-8ec2-a1fb-902923145493" [ 834.916375] env[65503]: _type = "Task" [ 834.916375] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.925870] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529cbc79-f39e-8ec2-a1fb-902923145493, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.193750] env[65503]: WARNING neutronclient.v2_0.client [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
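Annotation: the "Acquiring lock … / Lock … acquired … waited Ns / … released … held Ns" triplets above (the per-instance terminate locks, "compute_resources", and the datastore image-cache paths) are all emitted by oslo.concurrency's lockutils wrappers, not by Nova-specific logging. A minimal sketch of how such entries are typically produced, using only the public oslo.concurrency API; the lock names and function bodies below are placeholders, not the actual Nova code:

```python
from oslo_concurrency import lockutils

# Decorator form: logs "Acquiring lock ... by ..." and "... acquired ... waited Ns"
# on entry, and "... released ... held Ns" on exit, as in the entries above.
@lockutils.synchronized('45a4b511-aa6a-433d-b136-f53686db9575', 'nova-')
def do_terminate_instance():
    pass  # placeholder for the work done while the per-instance lock is held


# Context-manager form, e.g. for a shared resource such as "compute_resources";
# this is the variant that logs from lockutils.py:313/316/334 above.
def update_usage():
    with lockutils.lock('compute_resources', 'nova-'):
        pass  # placeholder


do_terminate_instance()
update_usage()
```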
[ 835.194388] env[65503]: WARNING openstack [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 835.194883] env[65503]: WARNING openstack [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 835.202777] env[65503]: DEBUG oslo_concurrency.lockutils [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.245s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.206071] env[65503]: INFO nova.compute.manager [-] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Took 1.06 seconds to deallocate network for instance. [ 835.206695] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.785s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.208919] env[65503]: INFO nova.compute.claims [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.239559] env[65503]: INFO nova.scheduler.client.report [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Deleted allocations for instance 1bda7a65-0231-4753-9762-43e9b13bd893 [ 835.298891] env[65503]: INFO nova.compute.manager [-] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Took 1.03 seconds to deallocate network for instance. 
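Annotation: the inventory reported above for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 determines the schedulable capacity behind the "Claim successful" message, using Placement's rule capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check of the logged figures (plain arithmetic, not Nova code):

```python
# Capacity per resource class as Placement derives it from the logged inventory.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 95},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:.0f}, max per single allocation={inv['max_unit']}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200
```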
[ 835.325356] env[65503]: WARNING openstack [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 835.325745] env[65503]: WARNING openstack [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 835.396970] env[65503]: WARNING neutronclient.v2_0.client [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 835.399296] env[65503]: WARNING openstack [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 835.399296] env[65503]: WARNING openstack [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 835.426560] env[65503]: DEBUG oslo_vmware.api [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4450040, 'name': PowerOffVM_Task, 'duration_secs': 0.218533} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.426798] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450041, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.427060] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.427225] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.427921] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58d392cf-14f3-466a-821a-28761fdf299e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.433463] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529cbc79-f39e-8ec2-a1fb-902923145493, 'name': SearchDatastore_Task, 'duration_secs': 0.015443} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.434311] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab37a167-2d9d-407f-bb7f-8a500865d4cf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.440520] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 835.440520] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525d5b51-dced-190f-c56a-62a550598915" [ 835.440520] env[65503]: _type = "Task" [ 835.440520] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.452529] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525d5b51-dced-190f-c56a-62a550598915, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.489762] env[65503]: DEBUG nova.network.neutron [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Updated VIF entry in instance network info cache for port 46f89336-5fb0-4852-bdd4-5f314fbda4f9. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 835.490172] env[65503]: DEBUG nova.network.neutron [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Updating instance_info_cache with network_info: [{"id": "46f89336-5fb0-4852-bdd4-5f314fbda4f9", "address": "fa:16:3e:7d:f8:3f", "network": {"id": "0b0ccb7a-8f7a-4d6c-a69d-9813e51cb43f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1056086899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecc78e2a9434c05bb07afa8e31e918d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46f89336-5f", "ovs_interfaceid": "46f89336-5fb0-4852-bdd4-5f314fbda4f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 835.500645] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.500887] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.501104] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Deleting the datastore file [datastore1] 45a4b511-aa6a-433d-b136-f53686db9575 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.502139] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6217cbe7-f60d-4ef3-ad62-3a79fe059117 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.509407] env[65503]: DEBUG oslo_vmware.api [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for the task: (returnval){ [ 835.509407] env[65503]: value = "task-4450043" [ 835.509407] env[65503]: _type = "Task" [ 835.509407] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.519333] env[65503]: DEBUG oslo_vmware.api [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4450043, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.718618] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.751717] env[65503]: DEBUG oslo_concurrency.lockutils [None req-64b5f3d9-8ab5-469a-9656-741a702b549b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "1bda7a65-0231-4753-9762-43e9b13bd893" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.840s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.809031] env[65503]: INFO nova.compute.manager [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Instance disappeared during terminate [ 835.809286] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9de67bbd-cb93-4416-a2d0-2aa73e10cf0b tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.324s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.917686] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450041, 'name': ReconfigVM_Task, 'duration_secs': 0.723421} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.917974] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Reconfigured VM instance instance-0000003a to attach disk [datastore2] bcd845e2-5a89-4eef-bb76-33d69834bbc1/bcd845e2-5a89-4eef-bb76-33d69834bbc1.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 835.918653] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d9bbd4e-43ee-4fcf-bb15-740e83984586 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.926430] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 835.926430] env[65503]: value = "task-4450044" [ 835.926430] env[65503]: _type = "Task" [ 835.926430] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.935471] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450044, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.951534] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525d5b51-dced-190f-c56a-62a550598915, 'name': SearchDatastore_Task, 'duration_secs': 0.012179} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.951821] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.952224] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 972a50ed-759a-4312-9314-9bf01a03fc3a/972a50ed-759a-4312-9314-9bf01a03fc3a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.952614] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-955b8001-2426-427e-82e2-cc8ad8e76585 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.961430] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 835.961430] env[65503]: value = "task-4450045" [ 835.961430] env[65503]: _type = "Task" [ 835.961430] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.971245] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450045, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.994235] env[65503]: DEBUG oslo_concurrency.lockutils [req-f0fcfd17-7c51-4238-a8a2-fe486202f420 req-65cd3e2b-7d74-4cbd-9805-b2f8d3a44bf7 service nova] Releasing lock "refresh_cache-972a50ed-759a-4312-9314-9bf01a03fc3a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.021375] env[65503]: DEBUG oslo_vmware.api [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Task: {'id': task-4450043, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182503} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.021375] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.021375] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.021657] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.021657] env[65503]: INFO nova.compute.manager [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Took 1.18 seconds to destroy the instance on the hypervisor. [ 836.022528] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 836.022528] env[65503]: DEBUG nova.compute.manager [-] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 836.022528] env[65503]: DEBUG nova.network.neutron [-] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 836.022808] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 836.023290] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 836.023551] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 836.091983] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
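Annotation: the instance_info_cache entry logged above for port 46f89336-5fb0-4852-bdd4-5f314fbda4f9 is plain JSON, so its useful fields (MAC, fixed IPs, NSX segmentation id) can be pulled out directly when reading such logs. A throwaway helper for that purpose, not part of Nova:

```python
import json


def summarize_vifs(network_info_json):
    """Return port id, MAC, fixed IPs and segmentation id per VIF in a cache entry."""
    summary = []
    for vif in json.loads(network_info_json):
        fixed_ips = [ip['address']
                     for subnet in vif['network']['subnets']
                     for ip in subnet['ips']]
        summary.append({
            'port_id': vif['id'],
            'mac': vif['address'],
            'fixed_ips': fixed_ips,  # e.g. ['192.168.128.13'] in the entry above
            'segmentation_id': (vif.get('details') or {}).get('segmentation_id'),
        })
    return summary
```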
[ 836.439542] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450044, 'name': Rename_Task, 'duration_secs': 0.204574} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.443725] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.443932] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ac5e584-db6b-49b5-b1a9-beb08f3a0886 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.457901] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 836.457901] env[65503]: value = "task-4450046" [ 836.457901] env[65503]: _type = "Task" [ 836.457901] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.468738] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450046, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.478991] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450045, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.711271] env[65503]: DEBUG oslo_concurrency.lockutils [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "f840b178-fd54-4c84-808c-a14c99a5ecdd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.711550] env[65503]: DEBUG oslo_concurrency.lockutils [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "f840b178-fd54-4c84-808c-a14c99a5ecdd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.711756] env[65503]: DEBUG oslo_concurrency.lockutils [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "f840b178-fd54-4c84-808c-a14c99a5ecdd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.711931] env[65503]: DEBUG oslo_concurrency.lockutils [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "f840b178-fd54-4c84-808c-a14c99a5ecdd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.712108] env[65503]: DEBUG oslo_concurrency.lockutils [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "f840b178-fd54-4c84-808c-a14c99a5ecdd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.714802] env[65503]: INFO nova.compute.manager [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Terminating instance [ 836.784333] env[65503]: DEBUG nova.compute.manager [req-68c67d09-2a4b-4344-bd44-1a493e952aa9 req-f634976f-3706-4ec1-b20b-1bd175445a8a service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Received event network-vif-deleted-0edc90ad-4b80-4fad-8456-06f696d9756a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 836.784642] env[65503]: INFO nova.compute.manager [req-68c67d09-2a4b-4344-bd44-1a493e952aa9 req-f634976f-3706-4ec1-b20b-1bd175445a8a service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Neutron deleted interface 0edc90ad-4b80-4fad-8456-06f696d9756a; detaching it from the instance and deleting it from the info cache [ 836.784867] env[65503]: DEBUG nova.network.neutron [req-68c67d09-2a4b-4344-bd44-1a493e952aa9 
req-f634976f-3706-4ec1-b20b-1bd175445a8a service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 836.811051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "62a18449-7cec-4785-a340-d0450adc8044" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.811734] env[65503]: DEBUG oslo_concurrency.lockutils [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "62a18449-7cec-4785-a340-d0450adc8044" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.812501] env[65503]: DEBUG oslo_concurrency.lockutils [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "62a18449-7cec-4785-a340-d0450adc8044-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.812501] env[65503]: DEBUG oslo_concurrency.lockutils [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "62a18449-7cec-4785-a340-d0450adc8044-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.812501] env[65503]: DEBUG oslo_concurrency.lockutils [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "62a18449-7cec-4785-a340-d0450adc8044-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.817789] env[65503]: INFO nova.compute.manager [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Terminating instance [ 836.872292] env[65503]: DEBUG nova.network.neutron [-] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 836.888351] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70c05aa-544e-4d81-a6ab-efbf69c59daf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.898545] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c0e2aa6c-ab5b-4f52-a4ea-927666b30b99 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.930720] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f116f5a-f954-48e8-b58e-986c15a1c8ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.939820] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ba4469-ca6a-4f44-905e-03ded233c18e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.955711] env[65503]: DEBUG nova.compute.provider_tree [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.970110] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450046, 'name': PowerOnVM_Task} progress is 87%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.976681] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450045, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615113} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.976934] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 972a50ed-759a-4312-9314-9bf01a03fc3a/972a50ed-759a-4312-9314-9bf01a03fc3a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.977165] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.977418] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1136f004-3cca-4758-8867-14155065270f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.985088] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 836.985088] env[65503]: value = "task-4450047" [ 836.985088] env[65503]: _type = "Task" [ 836.985088] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.996068] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450047, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.222188] env[65503]: DEBUG nova.compute.manager [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 837.222491] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 837.223418] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acffd54c-941c-4f51-a548-a6b5ad8e79ac {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.233930] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 837.234230] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4e86708-750b-42c3-8292-eb61a4ae4ae7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.240751] env[65503]: DEBUG oslo_vmware.api [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 837.240751] env[65503]: value = "task-4450048" [ 837.240751] env[65503]: _type = "Task" [ 837.240751] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.249375] env[65503]: DEBUG oslo_vmware.api [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4450048, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.290384] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-375e35cd-669b-4bd7-8691-abf9eb5901be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.300899] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347aa25f-89ab-4632-80d4-eed11ae711f3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.335819] env[65503]: DEBUG nova.compute.manager [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 837.336131] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 837.336556] env[65503]: DEBUG nova.compute.manager [req-68c67d09-2a4b-4344-bd44-1a493e952aa9 req-f634976f-3706-4ec1-b20b-1bd175445a8a service nova] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Detach interface failed, port_id=0edc90ad-4b80-4fad-8456-06f696d9756a, reason: Instance 45a4b511-aa6a-433d-b136-f53686db9575 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 837.337645] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9639378f-5194-4a30-8542-25e258479229 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.345968] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 837.346262] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fbd53b00-6e33-48e7-94a9-fc5adcb0d061 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.353149] env[65503]: DEBUG oslo_vmware.api [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 837.353149] env[65503]: value = "task-4450049" [ 837.353149] env[65503]: _type = "Task" [ 837.353149] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.362129] env[65503]: DEBUG oslo_vmware.api [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4450049, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.376077] env[65503]: INFO nova.compute.manager [-] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Took 1.35 seconds to deallocate network for instance. [ 837.461745] env[65503]: DEBUG nova.scheduler.client.report [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 837.480246] env[65503]: DEBUG oslo_vmware.api [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450046, 'name': PowerOnVM_Task, 'duration_secs': 0.655441} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.480771] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.481135] env[65503]: INFO nova.compute.manager [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Took 8.58 seconds to spawn the instance on the hypervisor. [ 837.481481] env[65503]: DEBUG nova.compute.manager [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 837.483387] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8e1c4a-335a-4841-b33f-cadb213e8448 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.503798] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450047, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086744} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.504716] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.506173] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be74d54-e179-4403-8868-8a7341ca2edc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.533856] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 972a50ed-759a-4312-9314-9bf01a03fc3a/972a50ed-759a-4312-9314-9bf01a03fc3a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.534245] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efcaa17a-d08e-4dd9-bbac-62f20252055f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.557440] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 837.557440] env[65503]: value = "task-4450050" [ 837.557440] env[65503]: _type = "Task" [ 837.557440] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.572987] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450050, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.753914] env[65503]: DEBUG oslo_vmware.api [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4450048, 'name': PowerOffVM_Task, 'duration_secs': 0.432536} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.754338] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 837.754506] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 837.754779] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50fbfa57-9128-4616-9b33-ebbdf991774f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.843285] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 837.843683] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 837.843683] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Deleting the datastore file [datastore2] f840b178-fd54-4c84-808c-a14c99a5ecdd {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.844078] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8634478e-ce3a-46bd-9bbf-dcdca0821aa4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.851765] env[65503]: DEBUG oslo_vmware.api [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for the task: (returnval){ [ 837.851765] env[65503]: value = "task-4450052" [ 837.851765] env[65503]: _type = "Task" [ 837.851765] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.864612] env[65503]: DEBUG oslo_vmware.api [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4450052, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.867853] env[65503]: DEBUG oslo_vmware.api [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4450049, 'name': PowerOffVM_Task, 'duration_secs': 0.210785} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.868131] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 837.868292] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 837.868549] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa91d8df-43b7-4d81-a6a3-07277d15c97d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.883640] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.948886] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 837.949082] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 837.949285] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Deleting the datastore file [datastore2] 62a18449-7cec-4785-a340-d0450adc8044 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.949569] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-879adde9-f9d9-4e2d-b0af-75973edf32f8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.957156] env[65503]: DEBUG oslo_vmware.api [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] 
Waiting for the task: (returnval){ [ 837.957156] env[65503]: value = "task-4450054" [ 837.957156] env[65503]: _type = "Task" [ 837.957156] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.966449] env[65503]: DEBUG oslo_vmware.api [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4450054, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.971440] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.765s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.972043] env[65503]: DEBUG nova.compute.manager [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 837.975426] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.514s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.976578] env[65503]: INFO nova.compute.claims [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.011038] env[65503]: INFO nova.compute.manager [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Took 54.24 seconds to build instance. [ 838.068282] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450050, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.363047] env[65503]: DEBUG oslo_vmware.api [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4450052, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.32381} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.363047] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 838.363047] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 838.363047] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 838.363342] env[65503]: INFO nova.compute.manager [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Took 1.14 seconds to destroy the instance on the hypervisor. [ 838.363550] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 838.363811] env[65503]: DEBUG nova.compute.manager [-] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 838.363938] env[65503]: DEBUG nova.network.neutron [-] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 838.364279] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 838.364991] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 838.365308] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 838.426544] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
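The destroy sequence above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) follows one pattern: each vCenter call returns a Task managed object, and the wait_for_task / _poll_task lines are the session polling that task until it finishes. A minimal sketch of that pattern with oslo.vmware, assuming an already-created VMwareAPISession named `session` and a VM managed-object reference `vm_ref` (both placeholders, not taken from this log, and not the exact Nova vm_util code path):

from oslo_vmware import exceptions as vexc


def power_off_vm(session, vm_ref):
    # Start the asynchronous vCenter operation; vCenter returns a Task moref.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    try:
        # wait_for_task() polls the server-side task state, which is what the
        # "_poll_task ... progress is N%" and "completed successfully" lines
        # above correspond to; it raises if the task ends in an error state.
        session.wait_for_task(task)
    except vexc.VimFaultException:
        # For example when the VM is already powered off or no longer exists.
        raise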
[ 838.468963] env[65503]: DEBUG oslo_vmware.api [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Task: {'id': task-4450054, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.261736} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.469467] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 838.469718] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 838.469956] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 838.470237] env[65503]: INFO nova.compute.manager [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Took 1.13 seconds to destroy the instance on the hypervisor. [ 838.470801] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 838.471092] env[65503]: DEBUG nova.compute.manager [-] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 838.471289] env[65503]: DEBUG nova.network.neutron [-] [instance: 62a18449-7cec-4785-a340-d0450adc8044] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 838.471570] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
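The "Waiting for function ... _deallocate_network_with_retries to return" entries above come from oslo.service's looping-call helper, which Nova uses to keep retrying Neutron deallocation until it succeeds. As an illustration only (Nova's retry wrapper is built on the same module but is more elaborate), the basic looping-call pattern looks like this, with `check_gone` standing in for the real retry function:

from oslo_service import loopingcall


def deallocate_with_retries(check_gone):
    def _poll():
        # Returning normally lets the loop run again after `interval`;
        # raising LoopingCallDone stops it and hands a value to wait().
        if check_gone():
            raise loopingcall.LoopingCallDone(True)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=1).wait()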
[ 838.472169] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 838.472523] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 838.481533] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.506s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.482310] env[65503]: DEBUG nova.compute.utils [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Instance 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69 could not be found. {{(pid=65503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 838.485431] env[65503]: DEBUG nova.compute.utils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 838.487597] env[65503]: DEBUG nova.compute.manager [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Instance disappeared during build. 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2557}} [ 838.488542] env[65503]: DEBUG nova.compute.manager [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Unplugging VIFs for instance {{(pid=65503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3051}} [ 838.488542] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "refresh_cache-4c6d9dca-6b62-41ca-a033-2fa9d8d86f69" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.488542] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "refresh_cache-4c6d9dca-6b62-41ca-a033-2fa9d8d86f69" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.488542] env[65503]: DEBUG nova.network.neutron [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 838.489862] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.116s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.491752] env[65503]: INFO nova.compute.claims [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.494644] env[65503]: DEBUG nova.compute.manager [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 838.494836] env[65503]: DEBUG nova.network.neutron [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 838.495176] env[65503]: WARNING neutronclient.v2_0.client [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
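The lock bookkeeping in this section ('Lock "compute_resources" acquired ... waited 35.116s', 'released ... held 0.506s', the per-instance and refresh_cache locks) is emitted by oslo.concurrency's lockutils, which times how long callers waited for and held each named semaphore. Nova wraps it in its own helpers; the raw pattern is roughly the following, with the lock name and body purely illustrative:

from oslo_concurrency import lockutils


def claim_resources(node_name):
    # Serializes resource-tracker updates the same way the
    # "compute_resources" lock above does; lockutils itself logs the
    # acquire/wait/hold timings seen throughout this log.
    @lockutils.synchronized('compute_resources')
    def _do_claim():
        return 'claimed on %s' % node_name

    return _do_claim()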
[ 838.495501] env[65503]: WARNING neutronclient.v2_0.client [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 838.496738] env[65503]: WARNING openstack [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 838.497101] env[65503]: WARNING openstack [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 838.515089] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fca73d3-5873-4738-8c6a-50ffcfee1ba7 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.766s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.516589] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 838.570419] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450050, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.582542] env[65503]: DEBUG nova.policy [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e446ab541084695871cc3feac9835fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19e0e62fe31a46fc802dbfc625ac7645', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 838.854109] env[65503]: DEBUG nova.compute.manager [req-56ccbb83-96b0-4127-a986-e65a0ce65fa5 req-20baf9bd-3bc0-494c-bff0-c90a69e98e69 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Received event network-vif-deleted-4fb9999c-b567-4158-9058-b0c36232af7c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 838.854390] env[65503]: INFO nova.compute.manager [req-56ccbb83-96b0-4127-a986-e65a0ce65fa5 req-20baf9bd-3bc0-494c-bff0-c90a69e98e69 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Neutron deleted interface 4fb9999c-b567-4158-9058-b0c36232af7c; detaching it from the instance and deleting it from the info cache [ 838.854543] env[65503]: DEBUG nova.network.neutron [req-56ccbb83-96b0-4127-a986-e65a0ce65fa5 req-20baf9bd-3bc0-494c-bff0-c90a69e98e69 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 838.987840] env[65503]: DEBUG nova.compute.manager [req-97872e7b-5d9b-496a-aaf9-bf328c6473e7 req-39e737e5-6711-4a61-a2cb-bde9051ddd3a service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Received event network-vif-deleted-264a9149-95a0-4c4c-89d4-578b30882bcb {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 838.987840] env[65503]: INFO nova.compute.manager [req-97872e7b-5d9b-496a-aaf9-bf328c6473e7 req-39e737e5-6711-4a61-a2cb-bde9051ddd3a service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Neutron deleted interface 264a9149-95a0-4c4c-89d4-578b30882bcb; detaching it from the instance and deleting it from the info cache [ 838.987988] env[65503]: DEBUG nova.network.neutron [req-97872e7b-5d9b-496a-aaf9-bf328c6473e7 req-39e737e5-6711-4a61-a2cb-bde9051ddd3a service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 838.991437] env[65503]: DEBUG nova.compute.manager [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 838.994566] env[65503]: DEBUG nova.compute.utils [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Can not refresh info_cache because instance was not found {{(pid=65503) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 838.994971] env[65503]: WARNING neutronclient.v2_0.client [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: nova.exception_Remote.InstanceNotFound_Remote: Instance 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69 could not be found. [ 838.997096] env[65503]: WARNING openstack [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 838.997527] env[65503]: WARNING openstack [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 839.021288] env[65503]: DEBUG nova.compute.manager [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 839.027870] env[65503]: DEBUG nova.network.neutron [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 839.066069] env[65503]: DEBUG nova.network.neutron [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Successfully created port: 12c029b6-d630-419b-8167-53eb6612a069 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 839.075782] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450050, 'name': ReconfigVM_Task, 'duration_secs': 1.117868} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.076078] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 972a50ed-759a-4312-9314-9bf01a03fc3a/972a50ed-759a-4312-9314-9bf01a03fc3a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 839.077425] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1ed74e77-440f-4f8f-a07e-f0244c8dc6aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.085543] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 839.085543] env[65503]: value = "task-4450055" [ 839.085543] env[65503]: _type = "Task" [ 839.085543] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.095551] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450055, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.175338] env[65503]: DEBUG nova.network.neutron [-] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 839.204927] env[65503]: DEBUG nova.network.neutron [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 839.268993] env[65503]: DEBUG nova.network.neutron [-] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 839.362182] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-debe9fbf-47b3-42b3-962c-bf0bb98c5c35 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.373290] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9f0803-8b39-492b-8db2-992a1bb03fae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.408302] env[65503]: DEBUG nova.compute.manager [req-56ccbb83-96b0-4127-a986-e65a0ce65fa5 req-20baf9bd-3bc0-494c-bff0-c90a69e98e69 service nova] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Detach interface failed, port_id=4fb9999c-b567-4158-9058-b0c36232af7c, reason: Instance f840b178-fd54-4c84-808c-a14c99a5ecdd could not be found. 
{{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 839.495858] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d21b3eaa-d422-4335-b06a-cd57fec3ffe3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.506639] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abafbde-f3db-407f-93d0-9c05adc81d58 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.551349] env[65503]: DEBUG nova.compute.manager [req-97872e7b-5d9b-496a-aaf9-bf328c6473e7 req-39e737e5-6711-4a61-a2cb-bde9051ddd3a service nova] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Detach interface failed, port_id=264a9149-95a0-4c4c-89d4-578b30882bcb, reason: Instance 62a18449-7cec-4785-a340-d0450adc8044 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 839.552559] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.598493] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450055, 'name': Rename_Task, 'duration_secs': 0.153552} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.598738] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 839.598983] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d96fbf7a-eccd-42ff-b324-5447e8dc38ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.606900] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 839.606900] env[65503]: value = "task-4450056" [ 839.606900] env[65503]: _type = "Task" [ 839.606900] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.614920] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450056, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.679882] env[65503]: INFO nova.compute.manager [-] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Took 1.32 seconds to deallocate network for instance. 
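The DEBUG policy entry above ("Policy check for network:attach_external_network failed ...") records an oslo.policy decision: the request credentials carry only the reader/member roles, so the check returns False and Nova treats the caller as not entitled to attach to external networks, logging only a DEBUG line. A rough sketch of such a check; the 'is_admin:True' rule string is an assumed stand-in for Nova's real default, not copied from its policy files:

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'is_admin:True'))

# Mirrors the credentials dict printed in the log, trimmed to what the
# rule actually inspects.
creds = {'is_admin': False, 'roles': ['reader', 'member'],
         'project_id': '19e0e62fe31a46fc802dbfc625ac7645'}

# do_raise=False turns a denial into a boolean instead of an exception,
# which is why the build continues after the failed check.
allowed = enforcer.authorize('network:attach_external_network', {}, creds,
                             do_raise=False)
print(allowed)  # False for a plain project member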
[ 839.710134] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "refresh_cache-4c6d9dca-6b62-41ca-a033-2fa9d8d86f69" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.710406] env[65503]: DEBUG nova.compute.manager [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=65503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3074}} [ 839.710587] env[65503]: DEBUG nova.compute.manager [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 839.710747] env[65503]: DEBUG nova.network.neutron [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 839.711112] env[65503]: WARNING neutronclient.v2_0.client [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: nova.exception_Remote.InstanceNotFound_Remote: Instance 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69 could not be found. [ 839.711934] env[65503]: WARNING openstack [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 839.712450] env[65503]: WARNING openstack [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 839.736751] env[65503]: DEBUG nova.network.neutron [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 839.737154] env[65503]: WARNING neutronclient.v2_0.client [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: nova.exception_Remote.InstanceNotFound_Remote: Instance 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69 could not be found. [ 839.771870] env[65503]: INFO nova.compute.manager [-] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Took 1.30 seconds to deallocate network for instance. [ 840.025197] env[65503]: DEBUG nova.compute.manager [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 840.051648] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aeb746e-bab0-4bf4-a1cf-3e803378ab28 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.059381] env[65503]: DEBUG nova.virt.hardware [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 840.059717] env[65503]: DEBUG nova.virt.hardware [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 840.060058] env[65503]: DEBUG nova.virt.hardware [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 840.060228] env[65503]: DEBUG nova.virt.hardware [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 840.060439] env[65503]: DEBUG nova.virt.hardware [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Image pref 0:0:0 
{{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 840.060632] env[65503]: DEBUG nova.virt.hardware [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 840.060891] env[65503]: DEBUG nova.virt.hardware [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 840.061070] env[65503]: DEBUG nova.virt.hardware [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 840.061242] env[65503]: DEBUG nova.virt.hardware [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 840.061400] env[65503]: DEBUG nova.virt.hardware [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 840.061664] env[65503]: DEBUG nova.virt.hardware [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 840.062630] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02883373-8a55-44f2-a3b5-bb8368b07831 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.073495] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ba7403-ed59-490e-9c5f-8d8beeabc616 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.079978] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b9c2b0-6b64-4042-8a13-c9fc6845a520 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.128041] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd1d4ba-7601-4c0f-9e05-8167a568f3e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.140648] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf15e10-0892-47a8-9d6d-0ccfca4ea42e {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.145015] env[65503]: DEBUG oslo_vmware.api [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450056, 'name': PowerOnVM_Task, 'duration_secs': 0.509929} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.145346] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 840.145566] env[65503]: INFO nova.compute.manager [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Took 8.49 seconds to spawn the instance on the hypervisor. [ 840.145765] env[65503]: DEBUG nova.compute.manager [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 840.146952] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defb9b8b-3e56-4969-9a46-35a49409c36b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.157742] env[65503]: DEBUG nova.compute.provider_tree [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.190963] env[65503]: DEBUG oslo_concurrency.lockutils [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.239497] env[65503]: DEBUG nova.network.neutron [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 840.279936] env[65503]: DEBUG oslo_concurrency.lockutils [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.634173] env[65503]: DEBUG nova.network.neutron [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 
tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Successfully updated port: 12c029b6-d630-419b-8167-53eb6612a069 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 840.661603] env[65503]: DEBUG nova.scheduler.client.report [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 840.673318] env[65503]: INFO nova.compute.manager [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Took 53.43 seconds to build instance. [ 840.743017] env[65503]: INFO nova.compute.manager [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 4c6d9dca-6b62-41ca-a033-2fa9d8d86f69] Took 1.03 seconds to deallocate network for instance. [ 841.070516] env[65503]: DEBUG nova.compute.manager [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Received event network-vif-plugged-12c029b6-d630-419b-8167-53eb6612a069 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 841.070781] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] Acquiring lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.071068] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.071126] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.071454] env[65503]: DEBUG nova.compute.manager [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] No waiting events found dispatching network-vif-plugged-12c029b6-d630-419b-8167-53eb6612a069 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 841.071454] env[65503]: 
WARNING nova.compute.manager [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Received unexpected event network-vif-plugged-12c029b6-d630-419b-8167-53eb6612a069 for instance with vm_state building and task_state spawning. [ 841.071624] env[65503]: DEBUG nova.compute.manager [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Received event network-changed-12c029b6-d630-419b-8167-53eb6612a069 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 841.071878] env[65503]: DEBUG nova.compute.manager [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Refreshing instance network info cache due to event network-changed-12c029b6-d630-419b-8167-53eb6612a069. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 841.072145] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] Acquiring lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.072336] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] Acquired lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.072522] env[65503]: DEBUG nova.network.neutron [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Refreshing network info cache for port 12c029b6-d630-419b-8167-53eb6612a069 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 841.137167] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.167547] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.677s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.168353] env[65503]: DEBUG nova.compute.manager [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 841.171344] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.887s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.173065] env[65503]: DEBUG nova.objects.instance [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lazy-loading 'resources' on Instance uuid 585a3d16-ee0c-4b71-9c0d-17b4bc968d09 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 841.174974] env[65503]: DEBUG oslo_concurrency.lockutils [None req-84ab3461-b21d-47c4-b50f-56219a307295 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "972a50ed-759a-4312-9314-9bf01a03fc3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.938s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.575464] env[65503]: WARNING neutronclient.v2_0.client [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 841.576178] env[65503]: WARNING openstack [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 841.576797] env[65503]: WARNING openstack [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 841.614263] env[65503]: DEBUG nova.network.neutron [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 841.675448] env[65503]: DEBUG nova.compute.utils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 841.679719] env[65503]: DEBUG nova.compute.manager [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 841.679826] env[65503]: DEBUG nova.network.neutron [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 841.680149] env[65503]: WARNING neutronclient.v2_0.client [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 841.680488] env[65503]: WARNING neutronclient.v2_0.client [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 841.681183] env[65503]: WARNING openstack [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 841.681388] env[65503]: WARNING openstack [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 841.688544] env[65503]: DEBUG nova.compute.manager [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 841.712658] env[65503]: DEBUG nova.network.neutron [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 841.731902] env[65503]: DEBUG nova.policy [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '80fb7b449c3641ae9c21524fe9a84a42', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa4c261871b043e092592410ee4880ed', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 841.759399] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8fae19a5-4e16-4e98-9764-3fd37735db9b tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "4c6d9dca-6b62-41ca-a033-2fa9d8d86f69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.329s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.041429] env[65503]: DEBUG nova.network.neutron [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Successfully created port: db676209-2c15-4a3f-9346-21566b03a82d {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 842.185149] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49114896-a1d8-4fa7-b7a4-ea47243d275c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.193555] env[65503]: DEBUG nova.compute.manager [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 842.199205] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5f0772-9b70-40a5-9457-116ae1ffd9ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.241669] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f0316e3-bb9d-4cdf-a8f7-4e8ae4b6f3da req-99c24a1b-4661-46d3-8683-02b2eb9a458c service nova] Releasing lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.241819] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.242179] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquired lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.242465] env[65503]: DEBUG nova.network.neutron [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 842.244997] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf982e02-e542-454f-8484-566f025f926d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.255019] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7706aa-08fb-4d1a-ba61-600b30b6f1d3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.273176] env[65503]: DEBUG nova.compute.manager [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 842.276391] env[65503]: DEBUG nova.compute.provider_tree [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.749411] env[65503]: WARNING openstack [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 842.749746] env[65503]: WARNING openstack [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 842.782167] env[65503]: DEBUG nova.scheduler.client.report [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 842.808009] env[65503]: DEBUG nova.network.neutron [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 842.812400] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.828629] env[65503]: WARNING openstack [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 842.829034] env[65503]: WARNING openstack [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 842.898193] env[65503]: WARNING neutronclient.v2_0.client [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 842.898897] env[65503]: WARNING openstack [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 842.899250] env[65503]: WARNING openstack [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 843.061815] env[65503]: DEBUG nova.network.neutron [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Updating instance_info_cache with network_info: [{"id": "12c029b6-d630-419b-8167-53eb6612a069", "address": "fa:16:3e:47:e5:1b", "network": {"id": "4abe123f-4f80-42e1-8dd0-fcf69e26c177", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-424144792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "19e0e62fe31a46fc802dbfc625ac7645", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12c029b6-d6", "ovs_interfaceid": "12c029b6-d630-419b-8167-53eb6612a069", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 843.205937] env[65503]: DEBUG nova.compute.manager [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 843.232609] env[65503]: DEBUG nova.virt.hardware [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 843.232900] env[65503]: DEBUG nova.virt.hardware [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 843.233068] env[65503]: DEBUG nova.virt.hardware [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 843.233246] env[65503]: DEBUG nova.virt.hardware [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 843.233388] env[65503]: DEBUG nova.virt.hardware [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 843.233527] env[65503]: DEBUG nova.virt.hardware [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 
tempest-ServersTestManualDisk-281688340-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 843.233728] env[65503]: DEBUG nova.virt.hardware [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 843.233881] env[65503]: DEBUG nova.virt.hardware [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 843.234060] env[65503]: DEBUG nova.virt.hardware [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 843.234222] env[65503]: DEBUG nova.virt.hardware [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 843.234391] env[65503]: DEBUG nova.virt.hardware [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 843.235329] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00042678-9632-4ae6-99d9-2534c0399042 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.244441] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c990bd5-64a4-4298-8fc6-4dc3c1043cc9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.292572] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.121s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.295053] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 35.948s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.324597] env[65503]: INFO nova.scheduler.client.report [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 
tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Deleted allocations for instance 585a3d16-ee0c-4b71-9c0d-17b4bc968d09 [ 843.530700] env[65503]: DEBUG nova.compute.manager [req-b4405fec-9b95-4aff-a5ef-f4c1f7b30cc3 req-5e3e2af7-58b6-485b-a9ce-1811c8cf41fe service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Received event network-vif-plugged-db676209-2c15-4a3f-9346-21566b03a82d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 843.530700] env[65503]: DEBUG oslo_concurrency.lockutils [req-b4405fec-9b95-4aff-a5ef-f4c1f7b30cc3 req-5e3e2af7-58b6-485b-a9ce-1811c8cf41fe service nova] Acquiring lock "31ee1061-6199-4341-86ab-9ae606b269fe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.530700] env[65503]: DEBUG oslo_concurrency.lockutils [req-b4405fec-9b95-4aff-a5ef-f4c1f7b30cc3 req-5e3e2af7-58b6-485b-a9ce-1811c8cf41fe service nova] Lock "31ee1061-6199-4341-86ab-9ae606b269fe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.530700] env[65503]: DEBUG oslo_concurrency.lockutils [req-b4405fec-9b95-4aff-a5ef-f4c1f7b30cc3 req-5e3e2af7-58b6-485b-a9ce-1811c8cf41fe service nova] Lock "31ee1061-6199-4341-86ab-9ae606b269fe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.530700] env[65503]: DEBUG nova.compute.manager [req-b4405fec-9b95-4aff-a5ef-f4c1f7b30cc3 req-5e3e2af7-58b6-485b-a9ce-1811c8cf41fe service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] No waiting events found dispatching network-vif-plugged-db676209-2c15-4a3f-9346-21566b03a82d {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 843.530700] env[65503]: WARNING nova.compute.manager [req-b4405fec-9b95-4aff-a5ef-f4c1f7b30cc3 req-5e3e2af7-58b6-485b-a9ce-1811c8cf41fe service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Received unexpected event network-vif-plugged-db676209-2c15-4a3f-9346-21566b03a82d for instance with vm_state building and task_state spawning. 
[ 843.554020] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "bdbae548-eefc-4e59-8053-f4b8e232580d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.554020] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bdbae548-eefc-4e59-8053-f4b8e232580d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.554020] env[65503]: DEBUG nova.compute.manager [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 843.554764] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd34135-3049-4752-a956-9be3d83ee3df {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.564083] env[65503]: DEBUG nova.compute.manager [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 843.564696] env[65503]: DEBUG nova.objects.instance [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lazy-loading 'flavor' on Instance uuid bdbae548-eefc-4e59-8053-f4b8e232580d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.566219] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Releasing lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.566538] env[65503]: DEBUG nova.compute.manager [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Instance network_info: |[{"id": "12c029b6-d630-419b-8167-53eb6612a069", "address": "fa:16:3e:47:e5:1b", "network": {"id": "4abe123f-4f80-42e1-8dd0-fcf69e26c177", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-424144792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "19e0e62fe31a46fc802dbfc625ac7645", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12c029b6-d6", "ovs_interfaceid": "12c029b6-d630-419b-8167-53eb6612a069", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 843.567190] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:e5:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12c029b6-d630-419b-8167-53eb6612a069', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 843.581917] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Creating folder: Project (19e0e62fe31a46fc802dbfc625ac7645). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 843.582259] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9f57b32-c1a3-4be6-a465-90e9e84ef381 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.599591] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Created folder: Project (19e0e62fe31a46fc802dbfc625ac7645) in parent group-v870190. [ 843.599830] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Creating folder: Instances. Parent ref: group-v870360. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 843.600106] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51998f28-3e7a-4544-ab53-52915e357517 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.612044] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Created folder: Instances in parent group-v870360. [ 843.612189] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 843.612278] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 843.612528] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2f643bb-9ed6-4024-b009-2af40631bc7c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.628900] env[65503]: DEBUG nova.network.neutron [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Successfully updated port: db676209-2c15-4a3f-9346-21566b03a82d {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 843.636765] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 843.636765] env[65503]: value = "task-4450059" [ 843.636765] env[65503]: _type = "Task" [ 843.636765] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.645423] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450059, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.798341] env[65503]: DEBUG nova.objects.instance [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lazy-loading 'migration_context' on Instance uuid 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.834295] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b005ce30-06ea-49cb-82cb-bc28f40f0e47 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "585a3d16-ee0c-4b71-9c0d-17b4bc968d09" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.179s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.132483] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquiring lock "refresh_cache-31ee1061-6199-4341-86ab-9ae606b269fe" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.132620] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquired lock "refresh_cache-31ee1061-6199-4341-86ab-9ae606b269fe" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.132822] env[65503]: DEBUG nova.network.neutron [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 844.152646] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450059, 
'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.575760] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 844.577252] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4523e80d-deca-45e7-bdcb-efa205509618 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.587101] env[65503]: DEBUG oslo_vmware.api [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 844.587101] env[65503]: value = "task-4450060" [ 844.587101] env[65503]: _type = "Task" [ 844.587101] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.597127] env[65503]: DEBUG oslo_vmware.api [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.636151] env[65503]: WARNING openstack [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 844.636668] env[65503]: WARNING openstack [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 844.661029] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450059, 'name': CreateVM_Task, 'duration_secs': 0.61395} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.661381] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 844.663512] env[65503]: WARNING neutronclient.v2_0.client [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 844.663920] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.664087] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.664393] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 844.664695] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fb4879d-b0b5-4541-96eb-abc1af71a8c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.670590] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 844.670590] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524d2418-65c5-bc76-e7aa-a6cc651a82a3" [ 844.670590] env[65503]: _type = "Task" [ 844.670590] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.687125] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524d2418-65c5-bc76-e7aa-a6cc651a82a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.690790] env[65503]: DEBUG nova.network.neutron [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 844.722464] env[65503]: WARNING openstack [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 844.723133] env[65503]: WARNING openstack [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 844.792635] env[65503]: WARNING neutronclient.v2_0.client [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 844.793398] env[65503]: WARNING openstack [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 844.793776] env[65503]: WARNING openstack [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 844.867402] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf14191-5b18-4c02-81ee-37e57dcdd1f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.883730] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333559bd-1c84-4ae2-ab07-b4b5510247c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.923660] env[65503]: DEBUG nova.network.neutron [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Updating instance_info_cache with network_info: [{"id": "db676209-2c15-4a3f-9346-21566b03a82d", "address": "fa:16:3e:23:cd:62", "network": {"id": "1ed3245b-562b-438a-af8e-42f466762f15", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-440007343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": 
{}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa4c261871b043e092592410ee4880ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb676209-2c", "ovs_interfaceid": "db676209-2c15-4a3f-9346-21566b03a82d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 844.925626] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8bbc34-e123-46c1-85e3-cb3a71ba5e39 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.935297] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0a4716-7a13-4221-9eaa-bbe86ed3bc24 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.950951] env[65503]: DEBUG nova.compute.provider_tree [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.099038] env[65503]: DEBUG oslo_vmware.api [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450060, 'name': PowerOffVM_Task, 'duration_secs': 0.306227} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.099417] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 845.099561] env[65503]: DEBUG nova.compute.manager [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 845.100325] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568aae64-7eb6-4e8f-b865-71112d043a0e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.183742] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524d2418-65c5-bc76-e7aa-a6cc651a82a3, 'name': SearchDatastore_Task, 'duration_secs': 0.01351} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.184474] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.184474] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.184615] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.184804] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.184941] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.185239] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c77f4bf4-5be5-4538-a9c1-0f4e06ab44a6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.210267] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.210916] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 845.211487] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fa30c0a-41e7-43b4-a61a-ca693f6f24ae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.218419] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 845.218419] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d2c6a2-67e2-0e5a-8ce9-9168bc008cfe" [ 845.218419] env[65503]: _type = "Task" [ 845.218419] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.229258] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d2c6a2-67e2-0e5a-8ce9-9168bc008cfe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.429270] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Releasing lock "refresh_cache-31ee1061-6199-4341-86ab-9ae606b269fe" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.429813] env[65503]: DEBUG nova.compute.manager [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Instance network_info: |[{"id": "db676209-2c15-4a3f-9346-21566b03a82d", "address": "fa:16:3e:23:cd:62", "network": {"id": "1ed3245b-562b-438a-af8e-42f466762f15", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-440007343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa4c261871b043e092592410ee4880ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb676209-2c", "ovs_interfaceid": "db676209-2c15-4a3f-9346-21566b03a82d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 845.430336] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:23:cd:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db676209-2c15-4a3f-9346-21566b03a82d', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 845.437972] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Creating folder: Project (fa4c261871b043e092592410ee4880ed). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 845.438375] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aacfbdf8-6e42-47f0-9608-dd09681fa6a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.451306] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Created folder: Project (fa4c261871b043e092592410ee4880ed) in parent group-v870190. [ 845.451511] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Creating folder: Instances. Parent ref: group-v870363. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 845.451840] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-388a7e6a-d201-4dd5-a62e-863a53480b81 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.454490] env[65503]: DEBUG nova.scheduler.client.report [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 845.469271] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Created folder: Instances in parent group-v870363. [ 845.469536] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 845.469750] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 845.469963] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-650688e0-9b23-4d2f-a178-b729c70be0eb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.495379] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 845.495379] env[65503]: value = "task-4450063" [ 845.495379] env[65503]: _type = "Task" [ 845.495379] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.505248] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450063, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.610285] env[65503]: DEBUG nova.compute.manager [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Received event network-changed-db676209-2c15-4a3f-9346-21566b03a82d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 845.610567] env[65503]: DEBUG nova.compute.manager [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Refreshing instance network info cache due to event network-changed-db676209-2c15-4a3f-9346-21566b03a82d. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 845.610681] env[65503]: DEBUG oslo_concurrency.lockutils [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] Acquiring lock "refresh_cache-31ee1061-6199-4341-86ab-9ae606b269fe" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.610855] env[65503]: DEBUG oslo_concurrency.lockutils [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] Acquired lock "refresh_cache-31ee1061-6199-4341-86ab-9ae606b269fe" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.611027] env[65503]: DEBUG nova.network.neutron [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Refreshing network info cache for port db676209-2c15-4a3f-9346-21566b03a82d {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 845.615286] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c48ae55f-1fae-452f-8ecc-767eecb8eaf1 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bdbae548-eefc-4e59-8053-f4b8e232580d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.062s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.732390] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d2c6a2-67e2-0e5a-8ce9-9168bc008cfe, 'name': SearchDatastore_Task, 'duration_secs': 0.018062} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.734201] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3ff928e-6485-43fb-9caa-25bb66bf0c4c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.741730] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 845.741730] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526b6197-c9aa-256d-8e06-bbd9c82cf4ee" [ 845.741730] env[65503]: _type = "Task" [ 845.741730] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.752694] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526b6197-c9aa-256d-8e06-bbd9c82cf4ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.801815] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "bc0c0066-b672-4385-8d68-c14e3635af4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.801815] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "bc0c0066-b672-4385-8d68-c14e3635af4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.005895] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450063, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.115847] env[65503]: WARNING neutronclient.v2_0.client [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 846.116624] env[65503]: WARNING openstack [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 846.116965] env[65503]: WARNING openstack [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 846.216924] env[65503]: DEBUG nova.objects.instance [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lazy-loading 'flavor' on Instance uuid bdbae548-eefc-4e59-8053-f4b8e232580d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 846.254392] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526b6197-c9aa-256d-8e06-bbd9c82cf4ee, 'name': SearchDatastore_Task, 'duration_secs': 0.016027} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.254392] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.254487] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] e4c1c94b-744f-4bed-8e68-3b3f9de7db44/e4c1c94b-744f-4bed-8e68-3b3f9de7db44.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 846.255138] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9fff854d-6e30-4618-b171-3842671490f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.271754] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 846.271754] env[65503]: value = "task-4450064" [ 846.271754] env[65503]: _type = "Task" [ 846.271754] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.279657] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450064, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.288553] env[65503]: WARNING openstack [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 846.289887] env[65503]: WARNING openstack [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 846.351760] env[65503]: WARNING neutronclient.v2_0.client [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
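The SearchDatastore_Task and CopyVirtualDisk_Task entries above all follow the same oslo.vmware pattern: a vSphere API method is invoked that returns a Task managed object, and the caller then blocks in wait_for_task() while _poll_task logs "progress is N%" until the task completes. A minimal sketch of that pattern, assuming placeholder vCenter credentials and datastore paths (none of these values come from this log), looks roughly like this:

# Sketch of the oslo.vmware task-polling pattern behind the wait_for_task /
# _poll_task lines above. Host, credentials and paths are placeholders.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vcenter.example.org',            # placeholder host
    'administrator@vsphere.local',    # placeholder user
    'secret',                         # placeholder password
    10,                               # api retry count
    0.5)                              # task poll interval, in seconds

# Invoke a method that returns a Task reference ...
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    sourceName='[datastore2] devstack-image-cache_base/example.vmdk',
    destName='[datastore2] example-instance/example-instance.vmdk')

# ... then block until it finishes; oslo.vmware polls the task and emits the
# "Task: {...} progress is N%" / "completed successfully" lines seen above.
session.wait_for_task(task)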
[ 846.351879] env[65503]: WARNING openstack [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 846.352232] env[65503]: WARNING openstack [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 846.447242] env[65503]: DEBUG nova.network.neutron [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Updated VIF entry in instance network info cache for port db676209-2c15-4a3f-9346-21566b03a82d. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 846.447645] env[65503]: DEBUG nova.network.neutron [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Updating instance_info_cache with network_info: [{"id": "db676209-2c15-4a3f-9346-21566b03a82d", "address": "fa:16:3e:23:cd:62", "network": {"id": "1ed3245b-562b-438a-af8e-42f466762f15", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-440007343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa4c261871b043e092592410ee4880ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb676209-2c", "ovs_interfaceid": "db676209-2c15-4a3f-9346-21566b03a82d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 846.469472] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.174s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.475261] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.347s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.476783] env[65503]: INFO nova.compute.claims [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 846.508417] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450063, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.725597] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "refresh_cache-bdbae548-eefc-4e59-8053-f4b8e232580d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.725597] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquired lock "refresh_cache-bdbae548-eefc-4e59-8053-f4b8e232580d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.725597] env[65503]: DEBUG nova.network.neutron [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 846.725998] env[65503]: DEBUG nova.objects.instance [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lazy-loading 'info_cache' on Instance uuid bdbae548-eefc-4e59-8053-f4b8e232580d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 846.784226] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450064, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.950642] env[65503]: DEBUG oslo_concurrency.lockutils [req-7afa2301-0226-4d10-942e-6c7fbec85539 req-51e7282f-a0f4-4830-a4cb-08b537e03d44 service nova] Releasing lock "refresh_cache-31ee1061-6199-4341-86ab-9ae606b269fe" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.010939] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450063, 'name': CreateVM_Task, 'duration_secs': 1.401901} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.011134] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 847.011952] env[65503]: WARNING neutronclient.v2_0.client [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 847.012471] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.012748] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.012995] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 847.013356] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70012d59-0e01-4ef5-b4c3-e757ad099657 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.022468] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for the task: (returnval){ [ 847.022468] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]523f3a23-8b20-f5aa-e15b-35861cbe2e35" [ 847.022468] env[65503]: _type = "Task" [ 847.022468] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.035200] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523f3a23-8b20-f5aa-e15b-35861cbe2e35, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.229956] env[65503]: DEBUG nova.objects.base [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 847.283072] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450064, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.842129} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.283249] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] e4c1c94b-744f-4bed-8e68-3b3f9de7db44/e4c1c94b-744f-4bed-8e68-3b3f9de7db44.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 847.283676] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.283834] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dec317b7-15b8-4872-a748-1faf3168ec88 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.291801] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 847.291801] env[65503]: value = "task-4450065" [ 847.291801] env[65503]: _type = "Task" [ 847.291801] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.300880] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450065, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.541339] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523f3a23-8b20-f5aa-e15b-35861cbe2e35, 'name': SearchDatastore_Task, 'duration_secs': 0.055794} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.541339] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.541339] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 847.541339] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.541339] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.541339] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 847.541339] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c293704-4e8a-4567-a115-dc89bf56910e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.558660] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 847.558793] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 847.563167] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5c68d73-8943-4d39-9df6-2c10141ce4d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.570964] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for the task: (returnval){ [ 847.570964] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525fb95c-79b3-ca24-173e-96169ce71a1a" [ 847.570964] env[65503]: _type = "Task" [ 847.570964] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.586244] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525fb95c-79b3-ca24-173e-96169ce71a1a, 'name': SearchDatastore_Task, 'duration_secs': 0.010395} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.587234] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c1c4b73-9eab-431e-af9a-457beefb8890 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.597361] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for the task: (returnval){ [ 847.597361] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52048c13-d1b3-1827-cc65-fcb2b5dbb293" [ 847.597361] env[65503]: _type = "Task" [ 847.597361] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.612812] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52048c13-d1b3-1827-cc65-fcb2b5dbb293, 'name': SearchDatastore_Task, 'duration_secs': 0.010334} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.612812] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.612812] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 31ee1061-6199-4341-86ab-9ae606b269fe/31ee1061-6199-4341-86ab-9ae606b269fe.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.613163] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ea17587-cf59-44be-8fa4-d5c8f2640e43 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.621953] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for the task: (returnval){ [ 847.621953] env[65503]: value = "task-4450066" [ 847.621953] env[65503]: _type = "Task" [ 847.621953] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.631688] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450066, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.732804] env[65503]: WARNING neutronclient.v2_0.client [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
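The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around the devstack-image-cache_base VMDK come from oslo.concurrency's lockutils, which nova uses to serialize work on the cached image before copying it to the instance directory. A minimal sketch of that locking pattern follows; the lock name mirrors the datastore path in the log, but the functions and their bodies are illustrative placeholders, not nova's implementation.

# Illustrative use of oslo.concurrency locking, matching the
# "Acquiring/Acquired/Releasing lock" entries above.
from oslo_concurrency import lockutils

CACHE_VMDK = ('[datastore2] devstack-image-cache_base/'
              'd68ffece-ab91-4610-b535-fa1fb25ade93/'
              'd68ffece-ab91-4610-b535-fa1fb25ade93.vmdk')

def copy_cached_image_to_instance():
    # Only one greenthread at a time may touch the cached VMDK; lockutils
    # logs the acquire/wait/held timings seen in the log entries above.
    with lockutils.lock(CACHE_VMDK):
        pass  # e.g. issue CopyVirtualDisk_Task for this image (placeholder)

# The decorator form wraps a whole function in the same named lock:
@lockutils.synchronized(CACHE_VMDK)
def extend_cached_image():
    pass  # placeholder body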
[ 847.733894] env[65503]: WARNING openstack [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 847.734485] env[65503]: WARNING openstack [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 847.801989] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450065, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077466} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.802284] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.803119] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e6956f-8b6d-4a52-8cd9-bb82d8ae2117 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.831543] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] e4c1c94b-744f-4bed-8e68-3b3f9de7db44/e4c1c94b-744f-4bed-8e68-3b3f9de7db44.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.839059] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98c1fa69-9c5d-4746-9454-3f1b29cf61c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.860876] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 847.860876] env[65503]: value = "task-4450067" [ 847.860876] env[65503]: _type = "Task" [ 847.860876] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.870877] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450067, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.875042] env[65503]: WARNING openstack [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 847.875446] env[65503]: WARNING openstack [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 847.959442] env[65503]: WARNING neutronclient.v2_0.client [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 847.960211] env[65503]: WARNING openstack [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 847.960472] env[65503]: WARNING openstack [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 848.019951] env[65503]: INFO nova.compute.manager [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Swapping old allocation on dict_keys(['988ff85a-1d12-41bb-a369-e298e8491ca1']) held by migration 6e2d81c5-8374-425a-bd6e-e2603e1be940 for instance [ 848.055050] env[65503]: DEBUG nova.scheduler.client.report [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Overwriting current allocation {'allocations': {'988ff85a-1d12-41bb-a369-e298e8491ca1': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 86}}, 'project_id': '771d20568f55445088cc06737c184615', 'user_id': '4708fca766f447daa757dbf855ff7d89', 'consumer_generation': 1} on consumer 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51 {{(pid=65503) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 848.059495] env[65503]: DEBUG nova.network.neutron [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] 
[instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Updating instance_info_cache with network_info: [{"id": "23e068ca-4763-4b07-a124-fdeee41f5399", "address": "fa:16:3e:92:7b:5e", "network": {"id": "0b0ccb7a-8f7a-4d6c-a69d-9813e51cb43f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1056086899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecc78e2a9434c05bb07afa8e31e918d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e068ca-47", "ovs_interfaceid": "23e068ca-4763-4b07-a124-fdeee41f5399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 848.117933] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd0df19-7caa-4e7c-94d9-326a5d43c832 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.122892] env[65503]: WARNING neutronclient.v2_0.client [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 848.139898] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450066, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.143077] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebacb8a7-cc0c-4e49-a1e1-8454c8bffded {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.180605] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.180740] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquired lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.180907] env[65503]: DEBUG nova.network.neutron [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 848.182841] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56002c2-f0d4-4391-a05e-63c128c12395 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.196262] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1da08e2-cc2b-42cc-9cdb-7d7c5be4a01f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.217192] env[65503]: DEBUG nova.compute.provider_tree [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.372065] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450067, 'name': ReconfigVM_Task, 'duration_secs': 0.41612} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.372400] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Reconfigured VM instance instance-0000003c to attach disk [datastore2] e4c1c94b-744f-4bed-8e68-3b3f9de7db44/e4c1c94b-744f-4bed-8e68-3b3f9de7db44.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.373090] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cad1636f-e302-468d-8639-d884fe9095c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.380890] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 848.380890] env[65503]: value = "task-4450068" [ 848.380890] env[65503]: _type = "Task" [ 848.380890] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.391474] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450068, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.564791] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Releasing lock "refresh_cache-bdbae548-eefc-4e59-8053-f4b8e232580d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.635574] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.700528} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.635847] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 31ee1061-6199-4341-86ab-9ae606b269fe/31ee1061-6199-4341-86ab-9ae606b269fe.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 848.636071] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 848.636338] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a14f10df-6bb9-48b0-9aae-e34c96072516 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.645061] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for the task: (returnval){ [ 848.645061] env[65503]: value = "task-4450069" [ 848.645061] env[65503]: _type = "Task" [ 848.645061] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.653418] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450069, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.690294] env[65503]: WARNING neutronclient.v2_0.client [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
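The records above repeatedly show the oslo.vmware pattern of kicking off a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task) and then blocking in wait_for_task while _poll_task logs progress. As a rough illustration only, the sketch below is a generic poll-until-complete loop in that spirit; it is not the oslo.vmware implementation, and get_task_info and TaskFailed are hypothetical names.

import time


class TaskFailed(Exception):
    """Hypothetical error for a task that ends in an error state."""


def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    """Poll task_id until it succeeds, raising on error or timeout.

    get_task_info is a hypothetical callable returning a dict such as
    {'state': 'running', 'progress': 25} for the given task id.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskFailed(info.get('message', 'task failed'))
        # Comparable to the "progress is 25%" DEBUG lines above.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")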
[ 848.691109] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 848.691490] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 848.699145] env[65503]: DEBUG oslo_concurrency.lockutils [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquiring lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.699272] env[65503]: DEBUG oslo_concurrency.lockutils [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.699551] env[65503]: DEBUG oslo_concurrency.lockutils [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquiring lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.699636] env[65503]: DEBUG oslo_concurrency.lockutils [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.699798] env[65503]: DEBUG oslo_concurrency.lockutils [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.702219] env[65503]: INFO nova.compute.manager [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] 
Terminating instance [ 848.720826] env[65503]: DEBUG nova.scheduler.client.report [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 848.797178] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 848.797632] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 848.866344] env[65503]: WARNING neutronclient.v2_0.client [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 848.867052] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 848.867417] env[65503]: WARNING openstack [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 848.892050] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450068, 'name': Rename_Task, 'duration_secs': 0.152979} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.892447] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 848.892640] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5317a7ab-df28-424f-a520-b7902b00a954 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.901770] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 848.901770] env[65503]: value = "task-4450070" [ 848.901770] env[65503]: _type = "Task" [ 848.901770] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.913733] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450070, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.960046] env[65503]: DEBUG nova.network.neutron [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance_info_cache with network_info: [{"id": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "address": "fa:16:3e:b6:69:8c", "network": {"id": "c4e06a38-0c41-43bb-95c8-5b964dd5f37a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "e49f34d3bf1942dc9c4bf5ee4810f103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633bd812-c5", "ovs_interfaceid": "633bd812-c51f-4ae0-bab2-ced08b56a04b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 849.154326] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450069, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071761} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.154614] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 849.155459] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ce3630-71f9-4168-ac59-f2f3c3dfe1da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.178461] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 31ee1061-6199-4341-86ab-9ae606b269fe/31ee1061-6199-4341-86ab-9ae606b269fe.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.178805] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b044e3f-b7fb-4d30-b500-010677c0befa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.200672] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for the task: (returnval){ [ 849.200672] env[65503]: value = "task-4450071" [ 849.200672] env[65503]: _type = "Task" [ 849.200672] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.210401] env[65503]: DEBUG nova.compute.manager [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 849.210670] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 849.211346] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450071, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.212218] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77abebb-7cc1-412a-a83a-d1ce34ac3eee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.220673] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 849.221020] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f580e471-252e-48f8-b186-00b5f77256c1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.226422] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.751s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.227159] env[65503]: DEBUG nova.compute.manager [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 849.231611] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.964s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.231855] env[65503]: DEBUG nova.objects.instance [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 849.234850] env[65503]: DEBUG oslo_vmware.api [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for the task: (returnval){ [ 849.234850] env[65503]: value = "task-4450072" [ 849.234850] env[65503]: _type = "Task" [ 849.234850] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.245583] env[65503]: DEBUG oslo_vmware.api [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4450072, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.412798] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450070, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.462992] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Releasing lock "refresh_cache-9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.463724] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 849.464118] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad8a9f04-3cb0-4470-bae2-ebf93af8d27f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.474311] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 849.474311] env[65503]: value = "task-4450073" [ 849.474311] env[65503]: _type = "Task" [ 849.474311] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.485745] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450073, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.569724] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.570084] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8533cc4-ee3c-4e79-92bc-3a7603d7c6ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.579538] env[65503]: DEBUG oslo_vmware.api [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 849.579538] env[65503]: value = "task-4450074" [ 849.579538] env[65503]: _type = "Task" [ 849.579538] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.590989] env[65503]: DEBUG oslo_vmware.api [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450074, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.711898] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450071, 'name': ReconfigVM_Task, 'duration_secs': 0.331603} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.712232] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 31ee1061-6199-4341-86ab-9ae606b269fe/31ee1061-6199-4341-86ab-9ae606b269fe.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.712922] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-210a1ada-df14-4c2f-8694-76b18da54ebc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.720514] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for the task: (returnval){ [ 849.720514] env[65503]: value = "task-4450075" [ 849.720514] env[65503]: _type = "Task" [ 849.720514] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.729817] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450075, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.737932] env[65503]: DEBUG nova.compute.utils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 849.742466] env[65503]: DEBUG nova.compute.manager [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 849.742737] env[65503]: DEBUG nova.network.neutron [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 849.743091] env[65503]: WARNING neutronclient.v2_0.client [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 849.743409] env[65503]: WARNING neutronclient.v2_0.client [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 849.744023] env[65503]: WARNING openstack [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 849.744345] env[65503]: WARNING openstack [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 849.756468] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8f4cd85-23bf-438a-8e00-c7c1267c0302 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.525s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.757469] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 40.389s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.765923] env[65503]: DEBUG oslo_vmware.api [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4450072, 'name': PowerOffVM_Task, 'duration_secs': 0.30352} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.765923] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.766250] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 849.766834] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43ed3f76-4454-49d1-8d97-e3688826ca3a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.804638] env[65503]: DEBUG nova.policy [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c6515c96e05437bb9109a90f7014d62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ac332b6234d48748aeffc8507f9df90', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 849.836875] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 849.837210] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 849.837465] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Deleting the datastore file [datastore1] f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 849.838134] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab7eedba-2975-418a-8cc1-07a616f83f56 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.848335] env[65503]: DEBUG oslo_vmware.api [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for the task: (returnval){ [ 849.848335] env[65503]: 
value = "task-4450077" [ 849.848335] env[65503]: _type = "Task" [ 849.848335] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.859867] env[65503]: DEBUG oslo_vmware.api [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4450077, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.912919] env[65503]: DEBUG oslo_vmware.api [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450070, 'name': PowerOnVM_Task, 'duration_secs': 0.553687} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.917023] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 849.917023] env[65503]: INFO nova.compute.manager [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Took 9.89 seconds to spawn the instance on the hypervisor. [ 849.917023] env[65503]: DEBUG nova.compute.manager [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 849.917023] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74a243f-2368-4437-a654-8504ed121c2d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.989545] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450073, 'name': PowerOffVM_Task, 'duration_secs': 0.215901} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.989888] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.990707] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:49:18Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='e416d5a6-7c96-408f-8f4c-2aff52378276',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1101851334',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 849.990984] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 849.991279] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 849.991608] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 849.991842] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 849.992065] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 849.992459] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 849.992746] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 849.993985] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 849.993985] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 849.993985] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 849.999747] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54e35448-2f12-461e-80fe-9bfbfd6ddfd3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.019523] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 850.019523] env[65503]: value = "task-4450078" [ 850.019523] env[65503]: _type = "Task" [ 850.019523] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.031648] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450078, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.094217] env[65503]: DEBUG oslo_vmware.api [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450074, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.192997] env[65503]: DEBUG nova.network.neutron [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Successfully created port: 3e218b5d-d9bb-421d-ac5f-63e3846ba9b4 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 850.232156] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450075, 'name': Rename_Task, 'duration_secs': 0.165799} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.232516] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 850.232783] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0bef7dae-17b8-488d-9292-bf037a5a5bac {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.241913] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for the task: (returnval){ [ 850.241913] env[65503]: value = "task-4450079" [ 850.241913] env[65503]: _type = "Task" [ 850.241913] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.242551] env[65503]: DEBUG nova.compute.manager [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 850.254577] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450079, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.361978] env[65503]: DEBUG oslo_vmware.api [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Task: {'id': task-4450077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175313} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.362425] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 850.362775] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 850.363054] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 850.363382] env[65503]: INFO nova.compute.manager [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 850.363687] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 850.363929] env[65503]: DEBUG nova.compute.manager [-] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 850.364044] env[65503]: DEBUG nova.network.neutron [-] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 850.364297] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
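The recurring "Disabling service 'block-storage' / 'key-manager': ... no such option valid_interfaces in group [cinder]/[barbican]" warnings appear to come from the openstack (openstacksdk) logger while it processes config sections that do not have those options registered. As a minimal, self-contained illustration of the error mechanism only, not the SDK's actual config handling, the oslo.config snippet below shows how reading an unregistered option raises cfg.NoSuchOptError and how registering it first avoids that; the option default used here is just an assumption.

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf(args=[])  # parse an empty command line so option values can be read
conf.register_group(cfg.OptGroup('cinder'))

try:
    # Nothing called valid_interfaces is registered under [cinder] yet.
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print(f"lookup failed: {exc}")

# Registering the option first makes the same lookup return its default.
conf.register_opts(
    [cfg.ListOpt('valid_interfaces', default=['internal', 'public'])],
    group='cinder')
print(conf.cinder.valid_interfaces)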
[ 850.364954] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 850.365241] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 850.440229] env[65503]: INFO nova.compute.manager [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Took 50.04 seconds to build instance. [ 850.456250] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 850.535142] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450078, 'name': ReconfigVM_Task, 'duration_secs': 0.170312} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.536728] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2848621-dab7-4428-9300-fcc6e6a0a317 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.560923] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:49:18Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='e416d5a6-7c96-408f-8f4c-2aff52378276',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1101851334',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 850.561883] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 850.561883] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 850.561883] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 850.561883] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 850.562092] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 850.562189] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 850.562391] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 850.562548] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 850.562769] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 850.562992] env[65503]: DEBUG nova.virt.hardware [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 850.563913] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f169811b-e9c4-4da5-856d-62fb5ff3c2b8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.572324] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 850.572324] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520d57dc-e832-9347-a719-56b8e763e3b0" [ 850.572324] env[65503]: _type = "Task" [ 850.572324] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.583307] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520d57dc-e832-9347-a719-56b8e763e3b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009126} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.591855] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Reconfiguring VM instance instance-00000026 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 850.592227] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7359ef28-ca99-49ac-bb0f-62f8b2cda313 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.611530] env[65503]: DEBUG oslo_vmware.api [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450074, 'name': PowerOnVM_Task, 'duration_secs': 0.524092} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.612948] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.613183] env[65503]: DEBUG nova.compute.manager [None req-9376ad1c-d9ab-450f-8829-6a869a4702ca tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 850.613543] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 850.613543] env[65503]: value = "task-4450080" [ 850.613543] env[65503]: _type = "Task" [ 850.613543] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.614304] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3b7e93-55db-432e-ad68-08b9d5b3fae0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.625906] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450080, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.755832] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450079, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.803992] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance ad85eef0-cef7-4900-b193-1737a6c2f17b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.804225] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.804400] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 1e92795e-cf30-4175-9e31-c29278f3e9e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.804594] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 2ab1cd4b-f2c0-4264-8463-8127a733a1c5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 850.804758] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 39266117-e82e-48ae-932a-be04b1a7351a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.804918] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 9dbaff4f-ab02-481b-b51f-b134021d277c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.805165] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance a197b590-1f74-4241-9579-2f2d3bb89a1d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
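The get_cpu_topology_constraints / _get_possible_cpu_topologies entries at the start of this stretch show the driver settling on sockets=1, cores=1, threads=1 for a 1-vCPU flavor with no explicit limits (the 65536 maxima are the "unbounded" defaults). A minimal sketch of that enumeration, assuming the simple no-NUMA case; this is an illustration of the selection the log walks through, not Nova's actual implementation:

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, maximum):
        """Enumerate socket/core/thread splits whose product equals vcpus."""
        for sockets in range(1, min(maximum.sockets, vcpus) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(maximum.cores, vcpus // sockets) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= maximum.threads:
                    yield VirtCPUTopology(sockets, cores, threads)

    # For the 1-vCPU m1.nano flavor in the log, with 65536/65536/65536 limits,
    # the only valid split is the one the log reports: 1 socket, 1 core, 1 thread.
    maximum = VirtCPUTopology(sockets=65536, cores=65536, threads=65536)
    print(list(possible_topologies(1, maximum)))
    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]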
[ 850.805258] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 38e9a714-87f8-422c-9cc5-09b6aec76198 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.805424] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 606b8e9f-67c0-4d5c-85ab-ca35f8b31977 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 850.805592] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 45a4b511-aa6a-433d-b136-f53686db9575 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 850.805748] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance b6cda94b-2894-4cf0-8522-6593df9723bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.806894] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance ff256d3f-af88-4f01-bdfd-cf89e06ab364 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.808103] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance a22f589e-7c40-4023-9a4c-9ab2a76faa94 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 850.808358] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance f840b178-fd54-4c84-808c-a14c99a5ecdd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 850.808604] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 62a18449-7cec-4785-a340-d0450adc8044 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 850.808769] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance a57486e1-82e3-48d5-99fe-c89b300a2136 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.808943] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.809172] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 083797a8-8daf-493b-89de-7ae9137ed538 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 850.809371] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance ad8676f9-0433-49bf-bc72-e36fa010ff1d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 850.809567] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance e74fe378-737a-4732-9a2d-b889a436b8a3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 850.809748] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance bdbae548-eefc-4e59-8053-f4b8e232580d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.809936] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 850.810132] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance bcd845e2-5a89-4eef-bb76-33d69834bbc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.810323] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 972a50ed-759a-4312-9314-9bf01a03fc3a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.810499] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance e4c1c94b-744f-4bed-8e68-3b3f9de7db44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.810696] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 31ee1061-6199-4341-86ab-9ae606b269fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.810873] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 52701da5-2908-40f8-b1c5-bc30f17d51a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 850.942269] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e1e95323-284c-4dfa-a681-835bba2501f8 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.560s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.128794] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450080, 'name': ReconfigVM_Task, 'duration_secs': 0.246738} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.129493] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Reconfigured VM instance instance-00000026 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 851.131205] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562d8612-f151-4f17-8cf4-b47c0f926052 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.161438] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51/9809fc8b-3842-4ce3-bb63-8ea37ee3bf51.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.166549] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a04e5878-1e92-4ff6-a9f7-72d3da20a243 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.193518] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 851.193518] env[65503]: value = "task-4450081" [ 851.193518] env[65503]: _type = "Task" [ 851.193518] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.204565] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450081, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.255865] env[65503]: DEBUG oslo_vmware.api [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450079, 'name': PowerOnVM_Task, 'duration_secs': 0.636536} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.256129] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.256331] env[65503]: INFO nova.compute.manager [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Took 8.05 seconds to spawn the instance on the hypervisor. 
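The long nova.compute.resource_tracker run above is the periodic _remove_deleted_instances_allocations pass classifying every placement allocation held against this compute node: instances the host actively manages are left alone, instances only scheduled here are skipped until they start, and allocations for instances the host does not recognise get the WARNING and are not healed. A condensed sketch of that triage, assuming plain dict/set inputs rather than Nova's real objects:

    import logging

    LOG = logging.getLogger(__name__)

    def triage_allocations(allocations, tracked_uuids, scheduled_uuids):
        """Mirror the three outcomes visible in the log for each allocation.

        allocations:     {instance_uuid: {'resources': {...}}} from placement
        tracked_uuids:   instances this host actively manages
        scheduled_uuids: instances scheduled here but not yet started
        """
        for uuid, alloc in allocations.items():
            if uuid in tracked_uuids:
                LOG.debug('Instance %s actively managed on this compute host '
                          'and has allocations in placement: %s.', uuid, alloc)
            elif uuid in scheduled_uuids:
                LOG.debug('Instance %s has been scheduled to this compute host '
                          'but has yet to start. Skipping heal of allocation.', uuid)
            else:
                LOG.warning('Instance %s is not being actively managed by this '
                            'compute host but has allocations referencing it. '
                            'Skipping heal of allocation.', uuid)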
[ 851.256509] env[65503]: DEBUG nova.compute.manager [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 851.257733] env[65503]: DEBUG nova.compute.manager [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 851.260231] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6bb2132-29ec-48f6-aabc-18f4020f126a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.297731] env[65503]: DEBUG nova.virt.hardware [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 851.298019] env[65503]: DEBUG nova.virt.hardware [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 851.298340] env[65503]: DEBUG nova.virt.hardware [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 851.299084] env[65503]: DEBUG nova.virt.hardware [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 851.299337] env[65503]: DEBUG nova.virt.hardware [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 851.299500] env[65503]: DEBUG nova.virt.hardware [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 
tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 851.299728] env[65503]: DEBUG nova.virt.hardware [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.299886] env[65503]: DEBUG nova.virt.hardware [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 851.300072] env[65503]: DEBUG nova.virt.hardware [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 851.300238] env[65503]: DEBUG nova.virt.hardware [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 851.300432] env[65503]: DEBUG nova.virt.hardware [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 851.301376] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ff1697-fee9-4f41-8400-f72372ceb6e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.097813] env[65503]: DEBUG nova.network.neutron [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Successfully updated port: 3e218b5d-d9bb-421d-ac5f-63e3846ba9b4 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 852.099331] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 429b7542-c288-4a7a-9032-09881938b256 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 852.100518] env[65503]: DEBUG nova.compute.manager [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 852.102909] env[65503]: DEBUG nova.network.neutron [-] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 852.109536] env[65503]: DEBUG nova.compute.manager [req-015c8432-6658-4150-8b4a-dcf9c98b0da7 req-2d0d0e5d-310f-45d7-a0c2-ad2261631206 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Received event network-vif-deleted-eb97a8c5-41a9-42ff-80fe-382fbcdc440a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 852.109739] env[65503]: INFO nova.compute.manager [req-015c8432-6658-4150-8b4a-dcf9c98b0da7 req-2d0d0e5d-310f-45d7-a0c2-ad2261631206 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Neutron deleted interface eb97a8c5-41a9-42ff-80fe-382fbcdc440a; detaching it from the instance and deleting it from the info cache [ 852.110857] env[65503]: DEBUG nova.network.neutron [req-015c8432-6658-4150-8b4a-dcf9c98b0da7 req-2d0d0e5d-310f-45d7-a0c2-ad2261631206 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 852.127143] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450081, 'name': ReconfigVM_Task, 'duration_secs': 0.692745} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.129712] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eecc116a-2e1a-44a3-811f-78163ecad0a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.134715] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51/9809fc8b-3842-4ce3-bb63-8ea37ee3bf51.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 852.137310] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e407f5-9668-4251-905c-52c59c24cddc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.145078] env[65503]: INFO nova.compute.manager [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Took 48.79 seconds to build instance. 
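The ReconfigVM_Task and PowerOnVM_Task round-trips above follow the usual oslo.vmware pattern: invoke a vSphere *_Task method through the API session, then block on wait_for_task while _poll_task logs the progress percentages seen here. A rough sketch of that pattern, assuming a session and vm_ref/config spec obtained elsewhere; the credentials shown in the comment are placeholders:

    from oslo_vmware import api

    # A session like the one the service builds at startup; values are placeholders.
    # session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)

    def reconfigure_vm(session, vm_ref, config_spec):
        """Submit ReconfigVM_Task and block until vCenter reports completion."""
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
        # wait_for_task drives the _poll_task progress loop seen in the log and
        # raises if the task ends in an error state; on success it returns the
        # task info, which is what the "completed successfully" entries reflect.
        return session.wait_for_task(task)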
[ 852.173105] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2810cd9a-891b-4991-ad0a-9a671ff7eae1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.195461] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed3944c-c8da-4642-af02-a848c5665a82 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.218531] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f01c2f-dd47-4665-b5d2-c70ab26c396d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.227985] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 852.228300] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8dfbce5-2bd7-4efc-8f82-83d46d9b299c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.236649] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 852.236649] env[65503]: value = "task-4450082" [ 852.236649] env[65503]: _type = "Task" [ 852.236649] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.247204] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450082, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.552335] env[65503]: DEBUG nova.compute.manager [req-e89486f7-f213-4f4f-8385-9f47f2fda60d req-be27ac20-6178-43a6-ba82-302a9855277b service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Received event network-vif-plugged-3e218b5d-d9bb-421d-ac5f-63e3846ba9b4 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 852.552553] env[65503]: DEBUG oslo_concurrency.lockutils [req-e89486f7-f213-4f4f-8385-9f47f2fda60d req-be27ac20-6178-43a6-ba82-302a9855277b service nova] Acquiring lock "52701da5-2908-40f8-b1c5-bc30f17d51a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.552771] env[65503]: DEBUG oslo_concurrency.lockutils [req-e89486f7-f213-4f4f-8385-9f47f2fda60d req-be27ac20-6178-43a6-ba82-302a9855277b service nova] Lock "52701da5-2908-40f8-b1c5-bc30f17d51a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.552903] env[65503]: DEBUG oslo_concurrency.lockutils [req-e89486f7-f213-4f4f-8385-9f47f2fda60d req-be27ac20-6178-43a6-ba82-302a9855277b service nova] Lock "52701da5-2908-40f8-b1c5-bc30f17d51a0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.553114] env[65503]: DEBUG nova.compute.manager [req-e89486f7-f213-4f4f-8385-9f47f2fda60d req-be27ac20-6178-43a6-ba82-302a9855277b service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] No waiting events found dispatching network-vif-plugged-3e218b5d-d9bb-421d-ac5f-63e3846ba9b4 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 852.553353] env[65503]: WARNING nova.compute.manager [req-e89486f7-f213-4f4f-8385-9f47f2fda60d req-be27ac20-6178-43a6-ba82-302a9855277b service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Received unexpected event network-vif-plugged-3e218b5d-d9bb-421d-ac5f-63e3846ba9b4 for instance with vm_state building and task_state spawning. [ 852.614170] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 628e67fa-9a28-468f-85ad-990d3f1e5d8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 852.625048] env[65503]: INFO nova.compute.manager [-] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Took 2.26 seconds to deallocate network for instance. 
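The network-vif-plugged handling above (pop_instance_event taking the per-instance "-events" lock, then "No waiting events found dispatching ..." and the "Received unexpected event" WARNING) is the receiving side of Nova's external-event handshake with Neutron: the spawn path registers the events it expects and blocks until they arrive or time out, and events nobody registered for are logged and dropped. A toy sketch of that handshake shape using a plain threading.Event; this is illustrative only and not the nova.compute.manager API:

    import threading

    class InstanceEvents:
        """Toy version of the expected-event registry the log's lock protects."""

        def __init__(self):
            self._lock = threading.Lock()      # plays the role of "<uuid>-events"
            self._waiters = {}                 # (uuid, event_name) -> Event

        def prepare(self, uuid, event_name):
            """Called by the spawn path before plugging the VIF."""
            with self._lock:
                ev = threading.Event()
                self._waiters[(uuid, event_name)] = ev
                return ev

        def pop_instance_event(self, uuid, event_name):
            """Called when Neutron reports e.g. network-vif-plugged-<port>."""
            with self._lock:
                ev = self._waiters.pop((uuid, event_name), None)
            if ev is None:
                # Matches the WARNING in the log: nobody was waiting for it.
                print('Received unexpected event %s for %s' % (event_name, uuid))
            else:
                ev.set()

The spawn side would wait on the Event returned by prepare() with a timeout, which is why an event arriving before anyone registered for it is merely warned about rather than treated as an error.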
[ 852.625048] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquiring lock "refresh_cache-52701da5-2908-40f8-b1c5-bc30f17d51a0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.625048] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquired lock "refresh_cache-52701da5-2908-40f8-b1c5-bc30f17d51a0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.625048] env[65503]: DEBUG nova.network.neutron [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 852.664249] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.673462] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b96599f-d722-46a0-b644-7db745a2d55d tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Lock "31ee1061-6199-4341-86ab-9ae606b269fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.334s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.678029] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a21e655-fef8-4fc8-a8e0-bdaae56ddd4b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.690849] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42945f72-b7bf-4081-baf8-2c8478b7bf60 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.732328] env[65503]: DEBUG nova.compute.manager [req-015c8432-6658-4150-8b4a-dcf9c98b0da7 req-2d0d0e5d-310f-45d7-a0c2-ad2261631206 service nova] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Detach interface failed, port_id=eb97a8c5-41a9-42ff-80fe-382fbcdc440a, reason: Instance f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 852.748124] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450082, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.129821] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 0001f4db-3073-411c-8d60-6d8528ef263a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 853.132748] env[65503]: WARNING openstack [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 853.133137] env[65503]: WARNING openstack [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 853.146806] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 708ed8ab-0ec9-457c-966d-b11c55895981 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 853.152588] env[65503]: DEBUG oslo_concurrency.lockutils [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.198790] env[65503]: DEBUG nova.network.neutron [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 853.220718] env[65503]: WARNING openstack [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 853.220718] env[65503]: WARNING openstack [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 853.251039] env[65503]: DEBUG oslo_vmware.api [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450082, 'name': PowerOnVM_Task, 'duration_secs': 0.962901} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.251039] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 853.310318] env[65503]: WARNING neutronclient.v2_0.client [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
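The Acquiring/Acquired/Releasing "refresh_cache-<uuid>" and "compute_resources" lines in this stretch are emitted by oslo.concurrency's lock helpers wrapped around the network-info cache refresh and resource claims. The same pattern in application code looks roughly like the following; the refresh body here is a placeholder, only the lock names are taken from the log:

    from oslo_concurrency import lockutils

    def rebuild_network_info_cache(instance_uuid):
        """Placeholder for the real Neutron round-trip done under the lock."""
        print('refreshing cache for %s' % instance_uuid)

    def refresh_instance_cache(instance_uuid):
        # Produces the Acquiring/Acquired/Releasing DEBUG lines seen in the log.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            rebuild_network_info_cache(instance_uuid)

    # The same helper also has a decorator form, e.g. guarding resource claims:
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid, resources):
        print('claiming %s for %s' % (resources, instance_uuid))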
[ 853.311131] env[65503]: WARNING openstack [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 853.311593] env[65503]: WARNING openstack [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 853.403853] env[65503]: DEBUG nova.network.neutron [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Updating instance_info_cache with network_info: [{"id": "3e218b5d-d9bb-421d-ac5f-63e3846ba9b4", "address": "fa:16:3e:e4:34:9c", "network": {"id": "72aaa61e-2af7-4316-9523-f540e623c131", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1168023271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ac332b6234d48748aeffc8507f9df90", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e218b5d-d9", "ovs_interfaceid": "3e218b5d-d9bb-421d-ac5f-63e3846ba9b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 853.600454] env[65503]: DEBUG nova.compute.manager [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Received event network-changed-db676209-2c15-4a3f-9346-21566b03a82d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 853.600705] env[65503]: DEBUG nova.compute.manager [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Refreshing instance network info cache due to event network-changed-db676209-2c15-4a3f-9346-21566b03a82d. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 853.600980] env[65503]: DEBUG oslo_concurrency.lockutils [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] Acquiring lock "refresh_cache-31ee1061-6199-4341-86ab-9ae606b269fe" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.601206] env[65503]: DEBUG oslo_concurrency.lockutils [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] Acquired lock "refresh_cache-31ee1061-6199-4341-86ab-9ae606b269fe" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.601397] env[65503]: DEBUG nova.network.neutron [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Refreshing network info cache for port db676209-2c15-4a3f-9346-21566b03a82d {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 853.654015] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance dcc876ae-075f-48d2-81a4-a1b780d6fdec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 853.907954] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Releasing lock "refresh_cache-52701da5-2908-40f8-b1c5-bc30f17d51a0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.907954] env[65503]: DEBUG nova.compute.manager [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Instance network_info: |[{"id": "3e218b5d-d9bb-421d-ac5f-63e3846ba9b4", "address": "fa:16:3e:e4:34:9c", "network": {"id": "72aaa61e-2af7-4316-9523-f540e623c131", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1168023271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ac332b6234d48748aeffc8507f9df90", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e218b5d-d9", "ovs_interfaceid": "3e218b5d-d9bb-421d-ac5f-63e3846ba9b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2035}} [ 853.907954] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:34:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e218b5d-d9bb-421d-ac5f-63e3846ba9b4', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.915991] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Creating folder: Project (3ac332b6234d48748aeffc8507f9df90). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.916601] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9bcafce8-7f5d-405a-b317-9fd86288d84b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.931661] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Created folder: Project (3ac332b6234d48748aeffc8507f9df90) in parent group-v870190. [ 853.931882] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Creating folder: Instances. Parent ref: group-v870366. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.932137] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f245ced-b384-491d-9fc3-eada198a9f4a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.945916] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Created folder: Instances in parent group-v870366. [ 853.946387] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 853.946492] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 853.947553] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d95937ee-3143-44fe-9958-ee29e9c1dea7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.969296] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.969296] env[65503]: value = "task-4450085" [ 853.969296] env[65503]: _type = "Task" [ 853.969296] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.977814] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450085, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.104395] env[65503]: WARNING neutronclient.v2_0.client [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 854.105344] env[65503]: WARNING openstack [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 854.105755] env[65503]: WARNING openstack [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 854.157333] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 854.266444] env[65503]: INFO nova.compute.manager [None req-9c5352e2-2260-401c-92f6-ed3ff3ff3257 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance to original state: 'active' [ 854.273048] env[65503]: WARNING openstack [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 854.273048] env[65503]: WARNING openstack [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 854.345741] env[65503]: WARNING neutronclient.v2_0.client [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 854.346667] env[65503]: WARNING openstack [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 854.347175] env[65503]: WARNING openstack [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 854.433552] env[65503]: DEBUG nova.network.neutron [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Updated VIF entry in instance network info cache for port db676209-2c15-4a3f-9346-21566b03a82d. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 854.433969] env[65503]: DEBUG nova.network.neutron [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Updating instance_info_cache with network_info: [{"id": "db676209-2c15-4a3f-9346-21566b03a82d", "address": "fa:16:3e:23:cd:62", "network": {"id": "1ed3245b-562b-438a-af8e-42f466762f15", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-440007343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa4c261871b043e092592410ee4880ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb676209-2c", "ovs_interfaceid": "db676209-2c15-4a3f-9346-21566b03a82d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 854.480785] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450085, 'name': CreateVM_Task, 'duration_secs': 0.388736} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.480978] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 854.481504] env[65503]: WARNING neutronclient.v2_0.client [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
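The "Instance VIF info" entry above shows the reduction the vmwareapi driver performs on a Neutron network_info entry before wiring a vmxnet3 NIC to an NSX logical switch: bridge name, MAC, port id and the opaque-network reference built from the nsx-logical-switch-id. A minimal sketch of that mapping using only the fields visible in the cached network_info; the function name and dict shape are illustrative, not the driver's own helper:

    def vif_info_from_network_info(vif):
        """Reduce one network_info entry to the fields the log's VIF info shows."""
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],   # e.g. 'br-int'
            'mac_address': vif['address'],              # e.g. fa:16:3e:e4:34:9c
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details.get('nsx-logical-switch-id'),
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                      # the Neutron port id
            'vif_model': 'vmxnet3',
        }

Feeding the 52701da5 cache entry above through such a mapping yields exactly the VIF info dict logged by build_virtual_machine.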
[ 854.481863] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.482015] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.482336] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 854.482625] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8c6db2c-b816-426c-a984-9751b745ece5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.489419] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for the task: (returnval){ [ 854.489419] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f924c2-3ec3-a8d2-d76b-167d00fc389c" [ 854.489419] env[65503]: _type = "Task" [ 854.489419] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.500471] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f924c2-3ec3-a8d2-d76b-167d00fc389c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.661360] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 854.788252] env[65503]: DEBUG nova.compute.manager [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Received event network-changed-3e218b5d-d9bb-421d-ac5f-63e3846ba9b4 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 854.788432] env[65503]: DEBUG nova.compute.manager [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Refreshing instance network info cache due to event network-changed-3e218b5d-d9bb-421d-ac5f-63e3846ba9b4. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 854.788835] env[65503]: DEBUG oslo_concurrency.lockutils [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Acquiring lock "refresh_cache-52701da5-2908-40f8-b1c5-bc30f17d51a0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.788993] env[65503]: DEBUG oslo_concurrency.lockutils [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Acquired lock "refresh_cache-52701da5-2908-40f8-b1c5-bc30f17d51a0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.789233] env[65503]: DEBUG nova.network.neutron [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Refreshing network info cache for port 3e218b5d-d9bb-421d-ac5f-63e3846ba9b4 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 854.936795] env[65503]: DEBUG oslo_concurrency.lockutils [req-26222881-48e1-44ab-a1cb-771a72cfae19 req-adaf64a7-d021-469e-9ad6-fc42e2f971b9 service nova] Releasing lock "refresh_cache-31ee1061-6199-4341-86ab-9ae606b269fe" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.004624] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f924c2-3ec3-a8d2-d76b-167d00fc389c, 'name': SearchDatastore_Task, 'duration_secs': 0.047733} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.005118] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.005246] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.005501] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.005633] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.005801] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.006097] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3546c2a-d227-40f7-bf56-d995018fc27d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.016553] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.016748] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.017538] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f57ebd4b-9bd5-4d20-b16a-85ae2d247d38 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.026376] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for the task: (returnval){ [ 855.026376] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f63339-9a1a-baef-5f2a-550b03adcae6" [ 855.026376] env[65503]: _type = "Task" [ 855.026376] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.039329] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f63339-9a1a-baef-5f2a-550b03adcae6, 'name': SearchDatastore_Task, 'duration_secs': 0.010342} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.040202] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ef723f7-1290-451d-b093-891521bf2cd7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.048824] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for the task: (returnval){ [ 855.048824] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5262fa12-a6d0-bb49-00be-751a7c650835" [ 855.048824] env[65503]: _type = "Task" [ 855.048824] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.059382] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5262fa12-a6d0-bb49-00be-751a7c650835, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.165278] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance bc0c0066-b672-4385-8d68-c14e3635af4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 855.165867] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 855.165867] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3712MB phys_disk=100GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] stats={'failed_builds': '1', 'num_instances': '16', 'num_vm_active': '11', 'num_task_None': '10', 'num_os_type_None': '16', 'num_proj_771d20568f55445088cc06737c184615': '2', 'io_workload': '3', 'num_proj_5de0ae091db74426975a523e945110fa': '3', 'num_task_deleting': '1', 'num_proj_9c519075bc624e7b90915354752765da': '1', 'num_proj_cb3b7254cf72404d805209ff11130a1e': '1', 'num_vm_resized': '1', 'num_task_resize_reverting': '1', 'num_proj_c024f22a228f4d2faa4b9316ca53a1ea': '1', 'num_proj_f5a588e741704449878e7a03d7892d11': '1', 'num_proj_34e8cd66745a40d2acebbce98050ee5d': '1', 'num_vm_stopped': '1', 'num_task_powering-on': '1', 'num_proj_5ecc78e2a9434c05bb07afa8e31e918d': '3', 'num_vm_building': '3', 'num_task_spawning': '2', 'num_proj_19e0e62fe31a46fc802dbfc625ac7645': '1', 'num_proj_fa4c261871b043e092592410ee4880ed': '1', 'num_task_networking': '1', 'num_proj_3ac332b6234d48748aeffc8507f9df90': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 855.291931] env[65503]: WARNING neutronclient.v2_0.client [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
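Note: the final resource view above (total_vcpus=48, used_vcpus=16, phys_ram=196590MB, used_ram=3712MB) pairs with the inventory this node pushes to placement a few entries later (VCPU allocation_ratio 4.0, MEMORY_MB reserved 512, ratios 1.0 elsewhere). The sketch below reproduces the schedulable headroom with the usual placement capacity formula, capacity = (total - reserved) * allocation_ratio; the numbers are the ones in the log, the formula is assumed to apply as-is to this deployment.

# Inventory as reported for provider 988ff85a-1d12-41bb-a369-e298e8491ca1.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}
# Usage taken from the resource tracker's final resource view above.
used = {'VCPU': 16, 'MEMORY_MB': 3712, 'DISK_GB': 16}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(rc, 'capacity:', capacity, 'used:', used[rc], 'free:', capacity - used[rc])
# VCPU: 48 cores overcommitted 4x -> 192 schedulable units, 16 currently allocated.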
[ 855.292870] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 855.293291] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 855.455080] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 855.456195] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 855.537208] env[65503]: WARNING neutronclient.v2_0.client [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 855.537713] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 855.538388] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 855.564581] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5262fa12-a6d0-bb49-00be-751a7c650835, 'name': SearchDatastore_Task, 'duration_secs': 0.01159} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.564581] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.564581] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 52701da5-2908-40f8-b1c5-bc30f17d51a0/52701da5-2908-40f8-b1c5-bc30f17d51a0.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 855.564581] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4231e74-8fbe-4644-9ced-e90c628b5720 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.571466] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for the task: (returnval){ [ 855.571466] env[65503]: value = "task-4450086" [ 855.571466] env[65503]: _type = "Task" [ 855.571466] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.584545] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450086, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.642402] env[65503]: DEBUG nova.network.neutron [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Updated VIF entry in instance network info cache for port 3e218b5d-d9bb-421d-ac5f-63e3846ba9b4. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 855.643636] env[65503]: DEBUG nova.network.neutron [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Updating instance_info_cache with network_info: [{"id": "3e218b5d-d9bb-421d-ac5f-63e3846ba9b4", "address": "fa:16:3e:e4:34:9c", "network": {"id": "72aaa61e-2af7-4316-9523-f540e623c131", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1168023271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ac332b6234d48748aeffc8507f9df90", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e218b5d-d9", "ovs_interfaceid": "3e218b5d-d9bb-421d-ac5f-63e3846ba9b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 855.818383] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1b1040-5c1d-42f4-9e90-e383892711a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.828423] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84470e2-907f-4038-9651-edd61cdd0f3d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.881290] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac911ce-4d63-4f70-81fb-28794f173e16 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.894410] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ceaa67f-5eef-41e9-8147-a305f40a425c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.917033] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 856.016751] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.017051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.017269] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.017449] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.017614] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.020667] env[65503]: INFO nova.compute.manager [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Terminating instance [ 856.085122] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450086, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.147362] env[65503]: DEBUG oslo_concurrency.lockutils [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Releasing lock "refresh_cache-52701da5-2908-40f8-b1c5-bc30f17d51a0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.148444] env[65503]: DEBUG nova.compute.manager [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Received event network-changed-12c029b6-d630-419b-8167-53eb6612a069 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 856.148444] env[65503]: DEBUG nova.compute.manager [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Refreshing instance network info cache due to event network-changed-12c029b6-d630-419b-8167-53eb6612a069. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 856.148444] env[65503]: DEBUG oslo_concurrency.lockutils [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Acquiring lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.148444] env[65503]: DEBUG oslo_concurrency.lockutils [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Acquired lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.148444] env[65503]: DEBUG nova.network.neutron [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Refreshing network info cache for port 12c029b6-d630-419b-8167-53eb6612a069 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 856.440942] env[65503]: ERROR nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [req-7d234446-8a6e-426e-a11a-c06f79067703] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7d234446-8a6e-426e-a11a-c06f79067703"}]} [ 856.462585] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 856.480034] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 856.480366] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 856.496851] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 856.525384] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 856.527047] env[65503]: DEBUG nova.compute.manager [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 856.530254] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 856.533107] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6097d9bf-4963-4c91-9ee2-1231cfceeef8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.546219] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 856.546765] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-510c8dd6-85dd-443b-b4d3-dba849842551 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.555623] env[65503]: DEBUG oslo_vmware.api [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 856.555623] env[65503]: value = "task-4450087" [ 856.555623] env[65503]: _type = "Task" [ 856.555623] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.569557] env[65503]: DEBUG oslo_vmware.api [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450087, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.592583] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450086, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583614} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.592583] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 52701da5-2908-40f8-b1c5-bc30f17d51a0/52701da5-2908-40f8-b1c5-bc30f17d51a0.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 856.592583] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 856.592953] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-765e766f-7828-40c4-988a-432386c647e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.602353] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for the task: (returnval){ [ 856.602353] env[65503]: value = "task-4450088" [ 856.602353] env[65503]: _type = "Task" [ 856.602353] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.617663] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450088, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.651573] env[65503]: WARNING neutronclient.v2_0.client [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
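Note: "Extending root virtual disk to 1048576" above is the tempest flavor's 1 GiB root disk expressed in KiB, the unit the virtual-disk extend call is given; the DISK_GB: 1 allocations earlier in the log show the same size in GiB. A one-line conversion sketch using oslo.utils; the variable names are illustrative.

from oslo_utils import units

root_gb = 1                             # flavor root disk, per the DISK_GB: 1 allocations above
requested_size_kb = root_gb * units.Mi  # 1 GiB expressed in KiB
print(requested_size_kb)                # 1048576, matching the log line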
[ 856.652360] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 856.652820] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 856.814799] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 856.815790] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 856.889041] env[65503]: WARNING neutronclient.v2_0.client [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
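Note: the recurring "Disabling service 'block-storage'/'key-manager'" warnings above come from the SDK probing the [cinder] and [barbican] config groups for a valid_interfaces option that was never registered in this process, so oslo.config raises NoSuchOptError and the SDK disables those service proxies. Below is a standalone reproduction of that lookup failure with oslo.config; the group and option names follow the log, and nothing here touches Nova's own configuration.

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))  # the group exists ...
conf([])                                     # ... but no options were registered in it

try:
    conf.cinder.valid_interfaces             # the same kind of lookup the SDK performs
except cfg.NoSuchOptError as exc:
    print(exc)                               # "no such option valid_interfaces in group [cinder]"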
[ 856.889701] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 856.890050] env[65503]: WARNING openstack [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 856.950706] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "972a50ed-759a-4312-9314-9bf01a03fc3a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.950826] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "972a50ed-759a-4312-9314-9bf01a03fc3a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.951048] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "972a50ed-759a-4312-9314-9bf01a03fc3a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.951269] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "972a50ed-759a-4312-9314-9bf01a03fc3a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.951375] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "972a50ed-759a-4312-9314-9bf01a03fc3a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.953920] env[65503]: INFO nova.compute.manager [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Terminating instance [ 856.993341] env[65503]: DEBUG nova.network.neutron [req-5f63e1b9-81e1-4781-82c4-867b215d577e 
req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Updated VIF entry in instance network info cache for port 12c029b6-d630-419b-8167-53eb6612a069. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 856.993768] env[65503]: DEBUG nova.network.neutron [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Updating instance_info_cache with network_info: [{"id": "12c029b6-d630-419b-8167-53eb6612a069", "address": "fa:16:3e:47:e5:1b", "network": {"id": "4abe123f-4f80-42e1-8dd0-fcf69e26c177", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-424144792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19e0e62fe31a46fc802dbfc625ac7645", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12c029b6-d6", "ovs_interfaceid": "12c029b6-d630-419b-8167-53eb6612a069", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 857.067996] env[65503]: DEBUG oslo_vmware.api [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450087, 'name': PowerOffVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.117240] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450088, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.24906} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.117729] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 857.118758] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e39fd1-2e5a-4731-b2a8-11d6c432571d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.145828] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 52701da5-2908-40f8-b1c5-bc30f17d51a0/52701da5-2908-40f8-b1c5-bc30f17d51a0.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 857.149494] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce3d6f1c-7358-4a25-b139-c3429a744a23 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.173423] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for the task: (returnval){ [ 857.173423] env[65503]: value = "task-4450089" [ 857.173423] env[65503]: _type = "Task" [ 857.173423] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.184169] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450089, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.253618] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df6af38-873a-422c-b0fa-c5c74b1593b7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.263144] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a9ce58-b95f-4cb8-b7e2-11bf7cb39bdb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.297762] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b2eb6a-6e75-4da3-af9c-1f4632bc096f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.306766] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6c2df1-af6c-4522-9992-9107217b304b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.324326] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 857.458487] env[65503]: DEBUG nova.compute.manager [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 857.458770] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 857.459664] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822e0127-e36b-4dcc-8ccd-1d9d00dd9b23 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.468912] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 857.469426] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd3ea98e-2880-4fe7-8212-158921dcee78 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.477274] env[65503]: DEBUG oslo_vmware.api [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 857.477274] env[65503]: value = "task-4450090" [ 857.477274] env[65503]: _type = "Task" [ 857.477274] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.488363] env[65503]: DEBUG oslo_vmware.api [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450090, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.497186] env[65503]: DEBUG oslo_concurrency.lockutils [req-5f63e1b9-81e1-4781-82c4-867b215d577e req-f2eadb02-238e-4de7-a06b-4d21b6891da4 service nova] Releasing lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.567959] env[65503]: DEBUG oslo_vmware.api [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450087, 'name': PowerOffVM_Task, 'duration_secs': 0.51592} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.568314] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 857.568530] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 857.568906] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28b58fe4-e6bb-4274-802a-b6ef197dd52f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.639246] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 857.639622] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 857.639928] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Deleting the datastore file [datastore1] 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 857.640376] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6da9a773-c521-4e4a-bccc-3e73ea507753 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.650248] env[65503]: DEBUG oslo_vmware.api [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 857.650248] env[65503]: value = "task-4450092" [ 857.650248] env[65503]: _type = "Task" [ 857.650248] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.661893] env[65503]: DEBUG oslo_vmware.api [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450092, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.684307] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450089, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.867590] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 87 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 857.867835] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 87 to 88 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 857.867997] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 857.989174] env[65503]: DEBUG oslo_vmware.api [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450090, 'name': PowerOffVM_Task, 'duration_secs': 0.25927} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.989486] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 857.989704] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 857.990695] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2969563-c11b-4794-92da-acaf0aa6686d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.056009] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 858.056358] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 858.056643] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Deleting the datastore file [datastore2] 972a50ed-759a-4312-9314-9bf01a03fc3a {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 858.056992] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85ed004c-a4e9-4967-8363-cb6aa7b1719a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.067058] env[65503]: DEBUG oslo_vmware.api [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 858.067058] env[65503]: value = "task-4450094" [ 858.067058] env[65503]: _type = "Task" [ 858.067058] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.075903] env[65503]: DEBUG oslo_vmware.api [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450094, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.160820] env[65503]: DEBUG oslo_vmware.api [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220021} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.161111] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 858.161310] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 858.161509] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 858.161708] env[65503]: INFO nova.compute.manager [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Took 1.63 seconds to destroy the instance on the hypervisor. [ 858.161988] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 858.162271] env[65503]: DEBUG nova.compute.manager [-] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 858.162449] env[65503]: DEBUG nova.network.neutron [-] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 858.162693] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
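Annotation: the "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entry above is emitted by oslo.service's looping-call machinery while network deallocation is retried. The sketch below shows the same retry-until-done pattern built on FixedIntervalLoopingCall and LoopingCallDone; the deallocate callable, attempt count and interval are illustrative placeholders, not Nova's actual helper.

    from oslo_service import loopingcall


    def deallocate_with_retries(deallocate, max_attempts=3, interval=2):
        """Keep calling ``deallocate`` until it succeeds or attempts run out."""
        state = {'attempts': 0}

        def _try_once():
            state['attempts'] += 1
            try:
                deallocate()
            except Exception:
                if state['attempts'] >= max_attempts:
                    raise               # give up; the error propagates out of the loop
                return                  # the looping call fires again after ``interval``
            raise loopingcall.LoopingCallDone(True)   # success: stop looping

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        return timer.start(interval=interval).wait()

The "Waiting for function ... to return" log line roughly corresponds to the final wait() on the looping call's result here.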
[ 858.163271] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 858.163538] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 858.184884] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450089, 'name': ReconfigVM_Task, 'duration_secs': 0.545208} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.185188] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 52701da5-2908-40f8-b1c5-bc30f17d51a0/52701da5-2908-40f8-b1c5-bc30f17d51a0.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.185851] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8671596d-591a-452c-bb36-e10d692758f9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.193703] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for the task: (returnval){ [ 858.193703] env[65503]: value = "task-4450095" [ 858.193703] env[65503]: _type = "Task" [ 858.193703] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.204109] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450095, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.241523] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
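Annotation: the repeated "Disabling service ... no such option valid_interfaces in group [cinder]" warnings above are openstacksdk reading an option that was never registered with oslo.config for that group. Below is a minimal, self-contained reproduction and fix using oslo.config; the group name and the default value are assumptions for the demo, not the SDK's own configuration code.

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))
    conf([])  # parse an empty command line so options can be read

    try:
        conf.cinder.valid_interfaces      # never registered for this group
    except cfg.NoSuchOptError as exc:
        print('unregistered read fails:', exc)

    # Registering the option (default chosen here only for the demo) fixes the read.
    conf.register_opts(
        [cfg.ListOpt('valid_interfaces', default=['internal', 'public'])],
        group='cinder')
    print(conf.cinder.valid_interfaces)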
[ 858.374239] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 858.375556] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.618s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.375691] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.916s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.377336] env[65503]: INFO nova.compute.claims [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 858.380651] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.381264] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Cleaning up deleted instances {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11916}} [ 858.502123] env[65503]: DEBUG nova.compute.manager [req-8ec67171-3272-45f5-861f-77c106308627 req-3adb4bf1-3842-4d4c-a847-f19283d13ddf service nova] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Received event network-vif-deleted-633bd812-c51f-4ae0-bab2-ced08b56a04b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 858.502381] env[65503]: INFO nova.compute.manager [req-8ec67171-3272-45f5-861f-77c106308627 req-3adb4bf1-3842-4d4c-a847-f19283d13ddf service nova] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Neutron deleted interface 633bd812-c51f-4ae0-bab2-ced08b56a04b; detaching it from the instance and deleting it from the info cache [ 858.502579] env[65503]: DEBUG nova.network.neutron [req-8ec67171-3272-45f5-861f-77c106308627 req-3adb4bf1-3842-4d4c-a847-f19283d13ddf service nova] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 858.577172] env[65503]: DEBUG oslo_vmware.api [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138062} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.577415] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 858.577581] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 858.577753] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 858.577908] env[65503]: INFO nova.compute.manager [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 858.578167] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 858.578354] env[65503]: DEBUG nova.compute.manager [-] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 858.578452] env[65503]: DEBUG nova.network.neutron [-] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 858.578692] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 858.579236] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 858.579498] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 858.642255] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
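Annotation: the 'Acquiring lock "compute_resources" ...', 'acquired ... waited N.NNNs' and '"released" ... held N.NNNs' lines throughout this section come from oslo.concurrency's lockutils wrapper, which serialises claims and usage updates within this nova-compute process. The sketch below shows the two usual forms of that primitive (decorator and context manager) on a toy counter; the counter is illustrative, not Nova's ResourceTracker.

    from oslo_concurrency import lockutils

    _usage = {'vcpus': 0}


    @lockutils.synchronized('compute_resources')
    def claim(vcpus):
        # Runs with the in-process "compute_resources" lock held; concurrent
        # callers queue up, which is what the waited/held durations record.
        _usage['vcpus'] += vcpus
        return _usage['vcpus']


    def release(vcpus):
        # Context-manager form of the same lock.
        with lockutils.lock('compute_resources'):
            _usage['vcpus'] -= vcpus


    if __name__ == '__main__':
        print(claim(1))
        release(1)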
[ 858.705589] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450095, 'name': Rename_Task, 'duration_secs': 0.236555} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.705887] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 858.706207] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5c62ef2-23c5-46be-b970-5f1adced89a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.715833] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for the task: (returnval){ [ 858.715833] env[65503]: value = "task-4450096" [ 858.715833] env[65503]: _type = "Task" [ 858.715833] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.725529] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450096, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.898723] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] There are 35 instances to clean {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11925}} [ 858.899069] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 1bda7a65-0231-4753-9762-43e9b13bd893] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 858.977584] env[65503]: DEBUG nova.network.neutron [-] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 859.006571] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5143cb53-36ed-4fda-a151-6d86aa8950d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.017291] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b350dd2-4f24-4250-bb93-e1e01b4e25b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.054153] env[65503]: DEBUG nova.compute.manager [req-8ec67171-3272-45f5-861f-77c106308627 req-3adb4bf1-3842-4d4c-a847-f19283d13ddf service nova] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Detach interface failed, port_id=633bd812-c51f-4ae0-bab2-ced08b56a04b, reason: Instance 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51 could not be found. 
{{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 859.056615] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "a57486e1-82e3-48d5-99fe-c89b300a2136" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.056836] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.227529] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450096, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.333992] env[65503]: DEBUG nova.network.neutron [-] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 859.405930] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: ea69b3a2-9de2-4cc1-87f6-9b00f2a8cbb4] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 859.481344] env[65503]: INFO nova.compute.manager [-] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Took 1.32 seconds to deallocate network for instance. [ 859.560306] env[65503]: DEBUG nova.compute.utils [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 859.730102] env[65503]: DEBUG oslo_vmware.api [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450096, 'name': PowerOnVM_Task, 'duration_secs': 0.69498} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.730475] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 859.730720] env[65503]: INFO nova.compute.manager [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Took 8.47 seconds to spawn the instance on the hypervisor. 
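Annotation: every "Task: {'id': task-NNNNNNN, ...} progress is X%" / "completed successfully" pair above is oslo.vmware polling a vCenter task (wait_for_task/_poll_task) until it reaches a terminal state, then reporting duration_secs. The snippet below is a generic, self-contained poll loop mirroring that pattern; TaskInfo, the fake lookup and the state names are stand-ins, not the oslo.vmware API.

    import time
    from dataclasses import dataclass


    @dataclass
    class TaskInfo:
        state: str            # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str = ''


    def poll_task(get_task_info, task_ref, interval=0.5):
        """Poll ``task_ref`` until it succeeds or errors; return elapsed seconds."""
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return time.monotonic() - start   # cf. the duration_secs values above
            if info.state == 'error':
                raise RuntimeError(info.error or 'task failed')
            time.sleep(interval)                  # queued/running: keep polling


    # Toy usage: a fake task that "finishes" on the third poll.
    _calls = {'n': 0}

    def _fake_lookup(_ref):
        _calls['n'] += 1
        return TaskInfo('success' if _calls['n'] >= 3 else 'running',
                        progress=min(100, _calls['n'] * 33))


    if __name__ == '__main__':
        print(poll_task(_fake_lookup, 'task-4450096', interval=0.1))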
[ 859.730953] env[65503]: DEBUG nova.compute.manager [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 859.734833] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9f0ee6-39dd-413b-9f1b-285dd20c7881 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.835906] env[65503]: INFO nova.compute.manager [-] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Took 1.26 seconds to deallocate network for instance. [ 859.883706] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0542cbd4-44f5-4e6c-8ed1-4970259914f7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.893363] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b2c3ac-5d76-4bad-a3a9-82fda27c4708 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.927601] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 5cefb589-9947-4fc1-89b4-d888f8c8f644] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 859.931183] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8ace03-8f94-41d3-a62f-45a16556fd3e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.939866] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f07322-1f12-4e47-ab51-2f1592b542c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.954421] env[65503]: DEBUG nova.compute.provider_tree [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.988172] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.063344] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.257242] env[65503]: INFO nova.compute.manager [None req-e8965d07-04f1-4747-87c7-6d32c1500565 
tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Took 52.17 seconds to build instance. [ 860.343036] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.431390] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 585a3d16-ee0c-4b71-9c0d-17b4bc968d09] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 860.457690] env[65503]: DEBUG nova.scheduler.client.report [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 860.759247] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e8965d07-04f1-4747-87c7-6d32c1500565 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Lock "52701da5-2908-40f8-b1c5-bc30f17d51a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.620s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.774484] env[65503]: DEBUG nova.compute.manager [req-2dbaff1b-9da6-401a-a791-d2e69cabda9f req-c59275d2-ae4b-4146-931d-4dfd8ee2b79b service nova] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Received event network-vif-deleted-46f89336-5fb0-4852-bdd4-5f314fbda4f9 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 860.934900] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: f8d61ded-ddf7-4ec9-88e7-92ffb6934733] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 860.964728] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.589s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.965282] env[65503]: DEBUG nova.compute.manager [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Start building networks asynchronously 
for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 860.967853] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.758s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.968051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.970404] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.437s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.971847] env[65503]: INFO nova.compute.claims [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.998572] env[65503]: INFO nova.scheduler.client.report [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleted allocations for instance 606b8e9f-67c0-4d5c-85ab-ca35f8b31977 [ 861.122855] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "a57486e1-82e3-48d5-99fe-c89b300a2136" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.123968] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.123968] env[65503]: INFO nova.compute.manager [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Attaching volume 23bf6f56-0021-4fa7-800f-3751b9a663f9 to /dev/sdb [ 861.168626] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066cc970-e6b0-43e1-83e6-adab7a91fac6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.177729] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-14491b0a-dcfd-4c3d-86cd-184ff06016da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.195605] env[65503]: DEBUG nova.virt.block_device [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Updating existing volume attachment record: 72f4b6d0-621a-4663-980d-a4f15ea0c9fb {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 861.438041] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: ca5962fe-3e41-4fae-8860-90fa7278e0fc] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 861.460091] env[65503]: DEBUG oslo_concurrency.lockutils [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquiring lock "52701da5-2908-40f8-b1c5-bc30f17d51a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.460549] env[65503]: DEBUG oslo_concurrency.lockutils [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Lock "52701da5-2908-40f8-b1c5-bc30f17d51a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.460760] env[65503]: DEBUG oslo_concurrency.lockutils [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquiring lock "52701da5-2908-40f8-b1c5-bc30f17d51a0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.460943] env[65503]: DEBUG oslo_concurrency.lockutils [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Lock "52701da5-2908-40f8-b1c5-bc30f17d51a0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.461137] env[65503]: DEBUG oslo_concurrency.lockutils [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Lock "52701da5-2908-40f8-b1c5-bc30f17d51a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.465564] env[65503]: INFO nova.compute.manager [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Terminating instance [ 861.477560] 
env[65503]: DEBUG nova.compute.utils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 861.481890] env[65503]: DEBUG nova.compute.manager [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 861.482156] env[65503]: DEBUG nova.network.neutron [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 861.482419] env[65503]: WARNING neutronclient.v2_0.client [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 861.482930] env[65503]: WARNING neutronclient.v2_0.client [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
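Annotation: "Using /dev/sd instead of None" above is Nova falling back to the /dev/sd prefix when no device name was requested, and the volume in this run then lands on /dev/sdb, presumably because /dev/sda is already taken. A toy version of that next-free-letter selection is sketched below; it assumes single-letter sdX names only and is not Nova's actual compute.utils.get_next_device_name.

    import re

    def next_device_name(existing, prefix='/dev/sd'):
        """Pick the next free single-letter device name under ``prefix``."""
        used = set()
        for name in existing:
            m = re.match(re.escape(prefix) + r'([a-z])$', name or '')
            if m:
                used.add(m.group(1))
        for letter in 'abcdefghijklmnopqrstuvwxyz':
            if letter not in used:
                return prefix + letter
        raise ValueError('no free device names under %s' % prefix)


    print(next_device_name(['/dev/sda']))   # -> /dev/sdb, as in the attach above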
[ 861.483569] env[65503]: WARNING openstack [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 861.483974] env[65503]: WARNING openstack [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 861.507485] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9878f1f2-8475-4f4f-8ce4-18acd687c5c6 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "606b8e9f-67c0-4d5c-85ab-ca35f8b31977" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.902s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.533525] env[65503]: DEBUG nova.policy [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e8ce61e2ba11481081e910c057daf822', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd4b8037fb204e7ebaf5f34c15096b62', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 861.757399] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "9dbaff4f-ab02-481b-b51f-b134021d277c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.757399] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "9dbaff4f-ab02-481b-b51f-b134021d277c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.757399] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "9dbaff4f-ab02-481b-b51f-b134021d277c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.757399] 
env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "9dbaff4f-ab02-481b-b51f-b134021d277c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.757399] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "9dbaff4f-ab02-481b-b51f-b134021d277c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.759222] env[65503]: INFO nova.compute.manager [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Terminating instance [ 861.838167] env[65503]: DEBUG nova.network.neutron [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Successfully created port: c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 861.941047] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 7ed036d1-8188-4aab-9d6d-8d7e46147812] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 861.969897] env[65503]: DEBUG nova.compute.manager [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 861.970288] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.971418] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ca439c-1070-4da5-893c-b719719c8c40 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.981210] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.981532] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8435c2a6-7f9c-4d17-9c94-50648134822d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.983898] env[65503]: DEBUG nova.compute.manager [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 861.999314] env[65503]: DEBUG oslo_vmware.api [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for the task: (returnval){ [ 861.999314] env[65503]: value = "task-4450100" [ 861.999314] env[65503]: _type = "Task" [ 861.999314] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.009934] env[65503]: DEBUG oslo_vmware.api [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450100, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.264727] env[65503]: DEBUG nova.compute.manager [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 862.264965] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 862.265869] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25e9cbf-d7e0-40da-a671-f9f5e70d0a45 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.276316] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 862.276644] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18a77b40-dd81-4b93-a60e-b5e2a0640de6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.285197] env[65503]: DEBUG oslo_vmware.api [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 862.285197] env[65503]: value = "task-4450101" [ 862.285197] env[65503]: _type = "Task" [ 862.285197] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.297319] env[65503]: DEBUG oslo_vmware.api [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450101, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.446223] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 75578ccd-2b34-4948-9afa-ac94e9fd8b4b] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 862.506118] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0de8e2-c3ae-48c1-b633-0d4ebe82899b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.517440] env[65503]: DEBUG oslo_vmware.api [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450100, 'name': PowerOffVM_Task, 'duration_secs': 0.218022} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.518462] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987fbbf7-453d-4135-a7ae-11fe650ab7c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.522434] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.522566] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.522800] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c088378c-9918-4801-99f4-11b72cc4924e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.558405] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5325c58-0128-40b7-8489-0a718412e628 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.568217] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baaa0bfd-87c0-44b5-ac67-5545f6095ba0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.587963] env[65503]: DEBUG nova.compute.provider_tree [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.592945] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.592945] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.592945] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Deleting the datastore file [datastore1] 52701da5-2908-40f8-b1c5-bc30f17d51a0 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.592945] env[65503]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37556f53-6004-4c5b-83f9-5e8914aaf129 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.599964] env[65503]: DEBUG oslo_vmware.api [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for the task: (returnval){ [ 862.599964] env[65503]: value = "task-4450103" [ 862.599964] env[65503]: _type = "Task" [ 862.599964] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.609771] env[65503]: DEBUG oslo_vmware.api [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.795941] env[65503]: DEBUG oslo_vmware.api [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450101, 'name': PowerOffVM_Task, 'duration_secs': 0.260654} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.796256] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.796418] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.796685] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b250b22-caa1-4312-8176-a92260bec477 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.864621] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.864994] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.865246] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleting the datastore file [datastore2] 9dbaff4f-ab02-481b-b51f-b134021d277c {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
862.865522] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99f778bb-8d75-4e92-ae29-aa8df70806f6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.873483] env[65503]: DEBUG oslo_vmware.api [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 862.873483] env[65503]: value = "task-4450105" [ 862.873483] env[65503]: _type = "Task" [ 862.873483] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.885026] env[65503]: DEBUG oslo_vmware.api [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450105, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.949958] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 9acbc312-a3a2-4758-87cd-5576c4f1f8dc] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 862.997394] env[65503]: DEBUG nova.compute.manager [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 863.030236] env[65503]: DEBUG nova.virt.hardware [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 863.030236] env[65503]: DEBUG nova.virt.hardware [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 863.030236] env[65503]: DEBUG nova.virt.hardware [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 863.030236] env[65503]: DEBUG 
nova.virt.hardware [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 863.030236] env[65503]: DEBUG nova.virt.hardware [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 863.030236] env[65503]: DEBUG nova.virt.hardware [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 863.030992] env[65503]: DEBUG nova.virt.hardware [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 863.031429] env[65503]: DEBUG nova.virt.hardware [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 863.031764] env[65503]: DEBUG nova.virt.hardware [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 863.032333] env[65503]: DEBUG nova.virt.hardware [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 863.032701] env[65503]: DEBUG nova.virt.hardware [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 863.034086] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ead892-58dc-428a-a216-549d50ec4241 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.044525] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a722fb23-24c8-49ae-aea1-0429fe02776b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.091006] env[65503]: DEBUG nova.scheduler.client.report [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 
tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 863.110837] env[65503]: DEBUG oslo_vmware.api [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Task: {'id': task-4450103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208201} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.111205] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.111442] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 863.111645] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 863.111828] env[65503]: INFO nova.compute.manager [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 863.112075] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 863.112272] env[65503]: DEBUG nova.compute.manager [-] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 863.112366] env[65503]: DEBUG nova.network.neutron [-] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 863.112632] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 863.113202] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 863.113481] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 863.152880] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 863.320675] env[65503]: DEBUG nova.compute.manager [req-4895cd1c-03d5-487f-b66f-5f558d1e6acc req-ecec84d6-a967-4d7c-8cd9-b7b98c13521c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Received event network-vif-plugged-c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 863.320799] env[65503]: DEBUG oslo_concurrency.lockutils [req-4895cd1c-03d5-487f-b66f-5f558d1e6acc req-ecec84d6-a967-4d7c-8cd9-b7b98c13521c service nova] Acquiring lock "429b7542-c288-4a7a-9032-09881938b256-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.321100] env[65503]: DEBUG oslo_concurrency.lockutils [req-4895cd1c-03d5-487f-b66f-5f558d1e6acc req-ecec84d6-a967-4d7c-8cd9-b7b98c13521c service nova] Lock "429b7542-c288-4a7a-9032-09881938b256-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.321649] env[65503]: DEBUG oslo_concurrency.lockutils [req-4895cd1c-03d5-487f-b66f-5f558d1e6acc req-ecec84d6-a967-4d7c-8cd9-b7b98c13521c service nova] Lock "429b7542-c288-4a7a-9032-09881938b256-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.321649] env[65503]: DEBUG nova.compute.manager [req-4895cd1c-03d5-487f-b66f-5f558d1e6acc req-ecec84d6-a967-4d7c-8cd9-b7b98c13521c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] No waiting events found dispatching network-vif-plugged-c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:345}} [ 863.321801] env[65503]: WARNING nova.compute.manager [req-4895cd1c-03d5-487f-b66f-5f558d1e6acc req-ecec84d6-a967-4d7c-8cd9-b7b98c13521c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Received unexpected event network-vif-plugged-c56d3579-9224-4ab6-8078-adc2f2c2803b for instance with vm_state building and task_state spawning. [ 863.386857] env[65503]: DEBUG oslo_vmware.api [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450105, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153626} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.387230] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.387441] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 863.387656] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 863.387730] env[65503]: INFO nova.compute.manager [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 863.387922] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 863.388210] env[65503]: DEBUG nova.compute.manager [-] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 863.388210] env[65503]: DEBUG nova.network.neutron [-] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 863.389029] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 863.389029] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 863.389238] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 863.407653] env[65503]: DEBUG nova.network.neutron [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Successfully updated port: c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 863.454530] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 34008711-b51b-467b-b972-bfda1023d696] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 863.475259] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 863.492934] env[65503]: DEBUG nova.compute.manager [req-0a127b4c-bb69-4429-b3d3-a4e4eee92311 req-6da210ae-98b5-4115-9ead-bddcb0af8eaa service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Received event network-vif-deleted-3e218b5d-d9bb-421d-ac5f-63e3846ba9b4 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 863.492934] env[65503]: INFO nova.compute.manager [req-0a127b4c-bb69-4429-b3d3-a4e4eee92311 req-6da210ae-98b5-4115-9ead-bddcb0af8eaa service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Neutron deleted interface 3e218b5d-d9bb-421d-ac5f-63e3846ba9b4; detaching it from the instance and deleting it from the info cache [ 863.492934] env[65503]: DEBUG nova.network.neutron [req-0a127b4c-bb69-4429-b3d3-a4e4eee92311 req-6da210ae-98b5-4115-9ead-bddcb0af8eaa service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 863.596159] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.596844] env[65503]: DEBUG nova.compute.manager [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 863.600213] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.552s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.600213] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.601845] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.355s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.602050] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.604329] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.950s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.604512] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.606262] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.023s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.606445] env[65503]: DEBUG nova.objects.instance [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 863.637537] env[65503]: INFO nova.scheduler.client.report [None 
req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted allocations for instance a22f589e-7c40-4023-9a4c-9ab2a76faa94 [ 863.643626] env[65503]: INFO nova.scheduler.client.report [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleted allocations for instance ad8676f9-0433-49bf-bc72-e36fa010ff1d [ 863.655308] env[65503]: INFO nova.scheduler.client.report [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleted allocations for instance 083797a8-8daf-493b-89de-7ae9137ed538 [ 863.881676] env[65503]: DEBUG nova.network.neutron [-] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 863.911301] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquiring lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.911508] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquired lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.911678] env[65503]: DEBUG nova.network.neutron [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 863.958959] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 0c0c6d3e-f4d2-458f-aa69-19f87a37f162] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 863.995894] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fe48b874-7768-46ef-a89a-fb6675ccbc8a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.009970] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771475b2-8f5b-4406-9ab1-7386586c8036 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.048284] env[65503]: DEBUG nova.compute.manager [req-0a127b4c-bb69-4429-b3d3-a4e4eee92311 req-6da210ae-98b5-4115-9ead-bddcb0af8eaa service nova] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Detach interface failed, port_id=3e218b5d-d9bb-421d-ac5f-63e3846ba9b4, reason: Instance 52701da5-2908-40f8-b1c5-bc30f17d51a0 could not be found. 
{{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 864.114281] env[65503]: DEBUG nova.compute.utils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 864.114281] env[65503]: DEBUG nova.compute.manager [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 864.114281] env[65503]: DEBUG nova.network.neutron [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 864.114669] env[65503]: WARNING neutronclient.v2_0.client [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 864.114749] env[65503]: WARNING neutronclient.v2_0.client [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 864.115842] env[65503]: WARNING openstack [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 864.115842] env[65503]: WARNING openstack [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 864.127472] env[65503]: DEBUG nova.compute.manager [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 864.151079] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7784b808-7daf-46f8-af8e-73e8cb336cfa tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "a22f589e-7c40-4023-9a4c-9ab2a76faa94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.198s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.153046] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d5b11c35-e4e6-45b5-a6c4-841ea251b31e tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "ad8676f9-0433-49bf-bc72-e36fa010ff1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.652s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.166460] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1baeb25e-2556-4dbc-8c14-2bdaf4197beb tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "083797a8-8daf-493b-89de-7ae9137ed538" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.890s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.203316] env[65503]: DEBUG nova.policy [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55db708d2a9b47baa25cafed2be1ba91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '463e93d05e1e4b27a3dc866a5b1991d0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 864.218069] env[65503]: DEBUG nova.network.neutron [-] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 864.385620] env[65503]: INFO nova.compute.manager [-] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Took 1.27 seconds to deallocate network for instance. 
[ 864.414983] env[65503]: WARNING openstack [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 864.415427] env[65503]: WARNING openstack [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 864.451212] env[65503]: DEBUG nova.network.neutron [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 864.462618] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 8f0bf665-b21b-42ed-816d-69dee2f40654] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 864.481461] env[65503]: WARNING openstack [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 864.481568] env[65503]: WARNING openstack [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 864.511820] env[65503]: DEBUG nova.network.neutron [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Successfully created port: bf6fff36-e288-4f03-b705-1d02e2d90395 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 864.584103] env[65503]: WARNING neutronclient.v2_0.client [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 864.584791] env[65503]: WARNING openstack [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 864.585160] env[65503]: WARNING openstack [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 864.631901] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e2b2ea5d-1fda-437e-a259-e4c3771f5d50 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.026s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.636795] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.545s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.642461] env[65503]: INFO nova.compute.claims [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.711388] env[65503]: DEBUG nova.network.neutron [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updating instance_info_cache with network_info: [{"id": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "address": "fa:16:3e:05:3f:fe", "network": {"id": "ede30400-e312-446b-bf0d-2e535869001e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-613624937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cd4b8037fb204e7ebaf5f34c15096b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc56d3579-92", "ovs_interfaceid": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 864.720440] env[65503]: INFO nova.compute.manager [-] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Took 1.33 seconds to deallocate network for instance. [ 864.893891] env[65503]: DEBUG oslo_concurrency.lockutils [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.972748] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 5d3ae7cd-11c3-4aa8-9ac5-07606e200bb1] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 865.150947] env[65503]: DEBUG nova.compute.manager [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 865.198315] env[65503]: DEBUG nova.virt.hardware [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 865.198779] env[65503]: DEBUG nova.virt.hardware [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 865.199070] env[65503]: DEBUG nova.virt.hardware [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 865.199387] env[65503]: DEBUG nova.virt.hardware [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 865.199717] env[65503]: DEBUG nova.virt.hardware [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 
tempest-DeleteServersTestJSON-142789765-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 865.200064] env[65503]: DEBUG nova.virt.hardware [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 865.200424] env[65503]: DEBUG nova.virt.hardware [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.200728] env[65503]: DEBUG nova.virt.hardware [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 865.201032] env[65503]: DEBUG nova.virt.hardware [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 865.203018] env[65503]: DEBUG nova.virt.hardware [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 865.203018] env[65503]: DEBUG nova.virt.hardware [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 865.203018] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee045c4-1de9-4b19-8fc3-8251c04429b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.213671] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff5c3c0-7237-48aa-b4b4-c91146c2d200 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.217716] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Releasing lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.217893] env[65503]: DEBUG nova.compute.manager [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Instance network_info: |[{"id": "c56d3579-9224-4ab6-8078-adc2f2c2803b", 
"address": "fa:16:3e:05:3f:fe", "network": {"id": "ede30400-e312-446b-bf0d-2e535869001e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-613624937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cd4b8037fb204e7ebaf5f34c15096b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc56d3579-92", "ovs_interfaceid": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 865.218633] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:3f:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a485857d-7086-4dcf-9d65-d0dcd177fcb0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c56d3579-9224-4ab6-8078-adc2f2c2803b', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.226894] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Creating folder: Project (cd4b8037fb204e7ebaf5f34c15096b62). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.228043] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.228150] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e03c5d6-834d-46c9-ad88-7fee6b100643 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.248965] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Created folder: Project (cd4b8037fb204e7ebaf5f34c15096b62) in parent group-v870190. [ 865.249221] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Creating folder: Instances. Parent ref: group-v870371. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.249688] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-859124af-33af-4b47-8444-0096f0698ca3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.259694] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Created folder: Instances in parent group-v870371. [ 865.259937] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 865.260187] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 429b7542-c288-4a7a-9032-09881938b256] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.260421] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-732d6739-0fcc-4606-928e-b607ccaa4e54 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.281544] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.281544] env[65503]: value = "task-4450109" [ 865.281544] env[65503]: _type = "Task" [ 865.281544] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.291635] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450109, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.464273] env[65503]: DEBUG nova.compute.manager [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Received event network-changed-c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 865.464552] env[65503]: DEBUG nova.compute.manager [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Refreshing instance network info cache due to event network-changed-c56d3579-9224-4ab6-8078-adc2f2c2803b. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 865.464813] env[65503]: DEBUG oslo_concurrency.lockutils [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] Acquiring lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.465101] env[65503]: DEBUG oslo_concurrency.lockutils [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] Acquired lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.465319] env[65503]: DEBUG nova.network.neutron [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Refreshing network info cache for port c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 865.477029] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 80cf5690-8a18-471a-b02f-3b7b9e539c0d] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 865.527404] env[65503]: DEBUG nova.compute.manager [req-93ba88c4-3b3f-438d-8cda-c8480cb39b4c req-0d2c6b9d-426f-44dc-829a-7266baf8e343 service nova] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Received event network-vif-deleted-0349cd73-74a3-45ee-9582-091d2fe091f9 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 865.756459] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.756459] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.756459] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 865.756459] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870370', 'volume_id': '23bf6f56-0021-4fa7-800f-3751b9a663f9', 'name': 'volume-23bf6f56-0021-4fa7-800f-3751b9a663f9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a57486e1-82e3-48d5-99fe-c89b300a2136', 'attached_at': '', 'detached_at': '', 'volume_id': '23bf6f56-0021-4fa7-800f-3751b9a663f9', 'serial': '23bf6f56-0021-4fa7-800f-3751b9a663f9'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 865.756972] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47866c32-31f3-4374-8b51-5723e0a71e02 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.783484] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bed56e-a78b-4f30-9962-14eed39d2076 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.797346] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450109, 'name': CreateVM_Task, 'duration_secs': 0.403963} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.811235] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 429b7542-c288-4a7a-9032-09881938b256] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 865.820506] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] volume-23bf6f56-0021-4fa7-800f-3751b9a663f9/volume-23bf6f56-0021-4fa7-800f-3751b9a663f9.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.825484] env[65503]: WARNING neutronclient.v2_0.client [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 865.826280] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.826280] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.826636] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 865.827066] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-907e9212-3598-4eb8-91e4-928afd4909e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.844180] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d2d6501-8343-426c-a86d-29d1ef431094 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.846675] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "236e651f-6a27-4601-8a76-ca1619e32dc6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.846924] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "236e651f-6a27-4601-8a76-ca1619e32dc6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.854359] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 865.854359] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ea7d6c-da00-e0dd-4cf0-b90e72fd2a29" [ 865.854359] env[65503]: _type = "Task" [ 865.854359] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.855970] env[65503]: DEBUG oslo_vmware.api [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 865.855970] env[65503]: value = "task-4450110" [ 865.855970] env[65503]: _type = "Task" [ 865.855970] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.872832] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ea7d6c-da00-e0dd-4cf0-b90e72fd2a29, 'name': SearchDatastore_Task, 'duration_secs': 0.010782} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.876412] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.876667] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 865.877041] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.877041] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.877253] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 865.877596] env[65503]: DEBUG oslo_vmware.api [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450110, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.880880] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93013072-555c-418f-b095-b7ffb1ea7f93 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.890470] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 865.890678] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 865.891447] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14330e13-685f-4d9d-942b-0e2de1153a87 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.901311] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 865.901311] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e33a83-703d-8919-da6a-7650616f6d7c" [ 865.901311] env[65503]: _type = "Task" [ 865.901311] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.910642] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e33a83-703d-8919-da6a-7650616f6d7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.968632] env[65503]: WARNING neutronclient.v2_0.client [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 865.969319] env[65503]: WARNING openstack [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 865.969667] env[65503]: WARNING openstack [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 865.979111] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 3ac287b4-2538-472b-84ac-7fed3c2ffff3] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 866.115053] env[65503]: DEBUG nova.network.neutron [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Successfully updated port: bf6fff36-e288-4f03-b705-1d02e2d90395 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 866.138631] env[65503]: WARNING openstack [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 866.139077] env[65503]: WARNING openstack [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 866.207650] env[65503]: WARNING neutronclient.v2_0.client [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 866.208336] env[65503]: WARNING openstack [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 866.208692] env[65503]: WARNING openstack [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 866.261313] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 866.284672] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071f8614-433e-47fb-a3fb-a964de481e17 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.298292] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805e2f2c-7479-48b6-ab28-81402a6dfc45 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.333424] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938fcaa6-6878-4115-8d4e-8ca27430a2e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.340865] env[65503]: DEBUG nova.network.neutron [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updated VIF entry in instance network info cache for port c56d3579-9224-4ab6-8078-adc2f2c2803b. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 866.341255] env[65503]: DEBUG nova.network.neutron [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updating instance_info_cache with network_info: [{"id": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "address": "fa:16:3e:05:3f:fe", "network": {"id": "ede30400-e312-446b-bf0d-2e535869001e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-613624937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cd4b8037fb204e7ebaf5f34c15096b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc56d3579-92", "ovs_interfaceid": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 866.345866] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2aa2bc2-ea15-40f2-ad6d-7ebeb0dfc8b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.361072] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 866.363852] env[65503]: DEBUG nova.compute.provider_tree [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.375415] env[65503]: DEBUG oslo_vmware.api [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450110, 'name': ReconfigVM_Task, 'duration_secs': 0.393038} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.375688] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Reconfigured VM instance instance-00000034 to attach disk [datastore1] volume-23bf6f56-0021-4fa7-800f-3751b9a663f9/volume-23bf6f56-0021-4fa7-800f-3751b9a663f9.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.380590] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ed7a70e-20aa-4cdb-9962-fd66b50c4502 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.397140] env[65503]: DEBUG oslo_vmware.api [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 866.397140] env[65503]: value = "task-4450111" [ 866.397140] env[65503]: _type = "Task" [ 866.397140] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.409501] env[65503]: DEBUG oslo_vmware.api [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450111, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.414953] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e33a83-703d-8919-da6a-7650616f6d7c, 'name': SearchDatastore_Task, 'duration_secs': 0.010926} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.416036] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b749d4b-12c5-4cdb-92db-481bacabb5b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.423586] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 866.423586] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526c6f09-e8c1-2fe6-2c9d-2c32b86c6429" [ 866.423586] env[65503]: _type = "Task" [ 866.423586] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.432031] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526c6f09-e8c1-2fe6-2c9d-2c32b86c6429, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.482040] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: fd548bd7-b686-43ef-83a7-c40addf8ba75] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 866.616780] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.617876] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.617876] env[65503]: DEBUG nova.network.neutron [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 866.786305] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.846324] env[65503]: DEBUG oslo_concurrency.lockutils [req-3617fe33-2fff-45ee-a4e0-b1aa56eb12e3 req-a4fde7f9-d1ed-4c9d-b07a-0a55bfe2171c service nova] Releasing lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.868930] env[65503]: DEBUG nova.scheduler.client.report [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 866.886899] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.908916] env[65503]: DEBUG oslo_vmware.api [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 
tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450111, 'name': ReconfigVM_Task, 'duration_secs': 0.139513} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.909256] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870370', 'volume_id': '23bf6f56-0021-4fa7-800f-3751b9a663f9', 'name': 'volume-23bf6f56-0021-4fa7-800f-3751b9a663f9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a57486e1-82e3-48d5-99fe-c89b300a2136', 'attached_at': '', 'detached_at': '', 'volume_id': '23bf6f56-0021-4fa7-800f-3751b9a663f9', 'serial': '23bf6f56-0021-4fa7-800f-3751b9a663f9'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 866.934508] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526c6f09-e8c1-2fe6-2c9d-2c32b86c6429, 'name': SearchDatastore_Task, 'duration_secs': 0.009898} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.935634] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.935634] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 429b7542-c288-4a7a-9032-09881938b256/429b7542-c288-4a7a-9032-09881938b256.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 866.935634] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-85fc329b-146b-4449-a207-0dffbd53426c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.944647] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 866.944647] env[65503]: value = "task-4450112" [ 866.944647] env[65503]: _type = "Task" [ 866.944647] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.953361] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450112, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.986536] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: f7b81948-c480-47a4-9d0f-5c2c163bd7f2] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 867.123538] env[65503]: WARNING openstack [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 867.124042] env[65503]: WARNING openstack [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 867.164015] env[65503]: DEBUG nova.network.neutron [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 867.187203] env[65503]: WARNING openstack [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 867.187628] env[65503]: WARNING openstack [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 867.263010] env[65503]: WARNING neutronclient.v2_0.client [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 867.263762] env[65503]: WARNING openstack [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 867.264136] env[65503]: WARNING openstack [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 867.368328] env[65503]: DEBUG nova.network.neutron [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Updating instance_info_cache with network_info: [{"id": "bf6fff36-e288-4f03-b705-1d02e2d90395", "address": "fa:16:3e:e9:41:4a", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf6fff36-e2", "ovs_interfaceid": "bf6fff36-e288-4f03-b705-1d02e2d90395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 867.374632] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.738s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.375162] env[65503]: DEBUG nova.compute.manager [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 867.377823] env[65503]: DEBUG oslo_concurrency.lockutils [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.628s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.379381] env[65503]: DEBUG oslo_concurrency.lockutils [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.381142] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.302s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.384088] env[65503]: INFO nova.compute.claims [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.420194] env[65503]: INFO nova.scheduler.client.report [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Deleted allocations for instance 2ab1cd4b-f2c0-4264-8463-8127a733a1c5 [ 867.456244] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450112, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480343} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.456545] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 429b7542-c288-4a7a-9032-09881938b256/429b7542-c288-4a7a-9032-09881938b256.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 867.456780] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 867.457059] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06288043-5730-4efe-b2ae-83b78bfe1456 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.464555] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 867.464555] env[65503]: value = "task-4450113" [ 867.464555] env[65503]: _type = "Task" [ 867.464555] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.475664] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450113, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.490155] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 8c274097-234a-44be-9159-c2fb0f1a8da1] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 867.613052] env[65503]: DEBUG nova.compute.manager [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Received event network-vif-plugged-bf6fff36-e288-4f03-b705-1d02e2d90395 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 867.613052] env[65503]: DEBUG oslo_concurrency.lockutils [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Acquiring lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.613347] env[65503]: DEBUG oslo_concurrency.lockutils [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.613642] env[65503]: DEBUG oslo_concurrency.lockutils [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.613945] env[65503]: DEBUG nova.compute.manager [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] No waiting events found dispatching network-vif-plugged-bf6fff36-e288-4f03-b705-1d02e2d90395 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 867.614224] env[65503]: WARNING nova.compute.manager [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Received unexpected event network-vif-plugged-bf6fff36-e288-4f03-b705-1d02e2d90395 for instance with vm_state building and task_state spawning. [ 867.614455] env[65503]: DEBUG nova.compute.manager [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Received event network-changed-bf6fff36-e288-4f03-b705-1d02e2d90395 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 867.614673] env[65503]: DEBUG nova.compute.manager [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Refreshing instance network info cache due to event network-changed-bf6fff36-e288-4f03-b705-1d02e2d90395. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 867.614926] env[65503]: DEBUG oslo_concurrency.lockutils [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Acquiring lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.871868] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.872310] env[65503]: DEBUG nova.compute.manager [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Instance network_info: |[{"id": "bf6fff36-e288-4f03-b705-1d02e2d90395", "address": "fa:16:3e:e9:41:4a", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf6fff36-e2", "ovs_interfaceid": "bf6fff36-e288-4f03-b705-1d02e2d90395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 867.872701] env[65503]: DEBUG oslo_concurrency.lockutils [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Acquired lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.872873] env[65503]: DEBUG nova.network.neutron [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Refreshing network info cache for port bf6fff36-e288-4f03-b705-1d02e2d90395 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 867.874412] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:41:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf6fff36-e288-4f03-b705-1d02e2d90395', 
'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.881845] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 867.883117] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.883407] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e7bc2da-db57-42fa-bad1-5f1c65fda0bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.900281] env[65503]: DEBUG nova.compute.utils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 867.903652] env[65503]: DEBUG nova.compute.manager [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Not allocating networking since 'none' was specified. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 867.911257] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.911257] env[65503]: value = "task-4450114" [ 867.911257] env[65503]: _type = "Task" [ 867.911257] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.922316] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450114, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.927955] env[65503]: DEBUG oslo_concurrency.lockutils [None req-52a1edc3-f486-416c-85e7-c6b56489440e tempest-ListImageFiltersTestJSON-1091573721 tempest-ListImageFiltersTestJSON-1091573721-project-member] Lock "2ab1cd4b-f2c0-4264-8463-8127a733a1c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.298s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.967173] env[65503]: DEBUG nova.objects.instance [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lazy-loading 'flavor' on Instance uuid a57486e1-82e3-48d5-99fe-c89b300a2136 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 867.982908] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450113, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080908} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.983709] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 867.984525] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a26549-03c0-4f66-94a3-b8c7990fafec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.001131] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: ab09cfe5-8257-462b-9ebf-87081d5793ac] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 868.012468] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 429b7542-c288-4a7a-9032-09881938b256/429b7542-c288-4a7a-9032-09881938b256.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 868.013352] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c24855f5-ebfb-4a25-b803-fb75ee4b839f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.037792] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 868.037792] env[65503]: value = "task-4450115" [ 868.037792] env[65503]: _type = "Task" [ 868.037792] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.050163] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450115, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.383888] env[65503]: WARNING neutronclient.v2_0.client [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 868.385270] env[65503]: WARNING openstack [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 868.385398] env[65503]: WARNING openstack [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 868.405164] env[65503]: DEBUG nova.compute.manager [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 868.423800] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450114, 'name': CreateVM_Task, 'duration_secs': 0.478386} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.424072] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.424670] env[65503]: WARNING neutronclient.v2_0.client [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 868.426550] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.426550] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.426550] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 868.426550] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ef1c4f6-c681-4b23-9567-705de2f62bab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.439254] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 868.439254] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b9c755-6f72-cace-fe92-7e978adac8df" [ 868.439254] env[65503]: _type = "Task" [ 868.439254] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.460044] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b9c755-6f72-cace-fe92-7e978adac8df, 'name': SearchDatastore_Task, 'duration_secs': 0.021063} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.461275] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.461576] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.461867] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.462140] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.462372] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.463295] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d49914fa-77ee-45cb-99ab-91a8f20ab8c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.478900] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f9bc2bcc-7f01-41cc-9298-d80dcaeb0190 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.356s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.481456] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.481727] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.486032] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95cdc00a-2295-4102-bdfd-b3fa74ac5690 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.494685] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 868.494685] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52811325-bf4e-03b5-6084-5abee03f0955" [ 868.494685] env[65503]: _type = "Task" [ 868.494685] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.504100] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52811325-bf4e-03b5-6084-5abee03f0955, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.515964] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 24e054d7-7662-47ef-8f69-4738c5ff9548] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 868.549386] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450115, 'name': ReconfigVM_Task, 'duration_secs': 0.436845} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.550314] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 429b7542-c288-4a7a-9032-09881938b256/429b7542-c288-4a7a-9032-09881938b256.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 868.551061] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-337b1e0c-7ac4-40b1-9bed-753e3337f162 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.559327] env[65503]: WARNING openstack [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 868.559729] env[65503]: WARNING openstack [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 868.572096] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 868.572096] env[65503]: value = "task-4450116" [ 868.572096] env[65503]: _type = "Task" [ 868.572096] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.577815] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450116, 'name': Rename_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.628385] env[65503]: WARNING neutronclient.v2_0.client [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
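
The 'Acquiring lock' / 'Acquired lock' / 'Releasing lock' records around the devstack-image-cache_base entries come from oslo.concurrency's lockutils, which serializes all work on a given cached image before the cache directory is created and the datastore is searched. A minimal sketch of that pattern, assuming the standard lockutils.lock context manager; the lock-name format and the fetch_fn helper are illustrative stand-ins, not taken from the log:

    from oslo_concurrency import lockutils

    def ensure_cached_image(image_id, fetch_fn):
        # Key the lock on the cache path, mirroring the
        # '[datastore1] devstack-image-cache_base/<image-id>' lock names above.
        lock_name = '[datastore1] devstack-image-cache_base/%s' % image_id
        with lockutils.lock(lock_name):
            # Only one request works on this cache entry at a time; fetch_fn
            # stands in for the check-and-populate logic run under the lock.
            return fetch_fn(image_id)

Because the lock is keyed on the per-image cache path, concurrent boots from the same image serialize on that cache entry while boots from other images proceed in parallel, which matches the interleaved requests visible in these records.
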
[ 868.629087] env[65503]: WARNING openstack [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 868.629445] env[65503]: WARNING openstack [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 868.755572] env[65503]: DEBUG nova.network.neutron [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Updated VIF entry in instance network info cache for port bf6fff36-e288-4f03-b705-1d02e2d90395. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 868.755572] env[65503]: DEBUG nova.network.neutron [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Updating instance_info_cache with network_info: [{"id": "bf6fff36-e288-4f03-b705-1d02e2d90395", "address": "fa:16:3e:e9:41:4a", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf6fff36-e2", "ovs_interfaceid": "bf6fff36-e288-4f03-b705-1d02e2d90395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 868.991661] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76398c9b-4552-4cd2-bbeb-99e4eed05d57 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.003805] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bff242-85c0-460b-889a-86edc1af7d38 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.013369] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': 
session[52ca68d6-9baf-b27a-a70d-300f2615599e]52811325-bf4e-03b5-6084-5abee03f0955, 'name': SearchDatastore_Task, 'duration_secs': 0.016022} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.014021] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8e49cd1-9bb3-4111-b35f-ec763bc0da1a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.044556] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 16d508f9-72f0-4853-92fb-c8c7a37b5668] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 869.049127] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022ca999-4952-4435-98be-d665b6d92a9d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.053325] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 869.053325] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529406cb-08df-0e51-4da8-7c29104859dd" [ 869.053325] env[65503]: _type = "Task" [ 869.053325] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.062725] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f0641a-f9e6-459a-9e4a-968bfb82fc67 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.073218] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529406cb-08df-0e51-4da8-7c29104859dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.084395] env[65503]: DEBUG nova.compute.provider_tree [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.094126] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450116, 'name': Rename_Task, 'duration_secs': 0.147877} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.094273] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 869.094544] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19a0aeed-7c08-4a12-b42b-9b36ecccbd4a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.104364] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 869.104364] env[65503]: value = "task-4450117" [ 869.104364] env[65503]: _type = "Task" [ 869.104364] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.115442] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450117, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.258631] env[65503]: DEBUG oslo_concurrency.lockutils [req-17be01c2-5be9-420b-99d4-ad9db6d0fbbb req-fd6add9e-927b-4e9c-b3a0-73363566c096 service nova] Releasing lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.418244] env[65503]: DEBUG nova.compute.manager [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 869.443471] env[65503]: DEBUG nova.virt.hardware [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 869.443766] env[65503]: DEBUG nova.virt.hardware [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 869.443890] env[65503]: DEBUG nova.virt.hardware [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 869.444092] env[65503]: DEBUG nova.virt.hardware [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 869.444237] env[65503]: DEBUG nova.virt.hardware [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 869.444380] env[65503]: DEBUG nova.virt.hardware [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 869.444586] env[65503]: DEBUG nova.virt.hardware [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 869.444738] env[65503]: DEBUG nova.virt.hardware [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 869.444899] env[65503]: DEBUG nova.virt.hardware [None 
req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 869.445075] env[65503]: DEBUG nova.virt.hardware [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 869.445251] env[65503]: DEBUG nova.virt.hardware [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 869.446143] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891db20e-7c1d-4e11-a02e-0b145c3e5ef2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.456093] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940759c5-7090-4361-9c17-004427581289 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.474249] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.479579] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Creating folder: Project (090f0af574f940078f7c3ca39dbb0777). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.479953] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f69bd136-5119-4bd1-bee1-5b50798e1cf6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.493168] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Created folder: Project (090f0af574f940078f7c3ca39dbb0777) in parent group-v870190. [ 869.493355] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Creating folder: Instances. Parent ref: group-v870375. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.493552] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af4835a8-72db-4459-8056-91348a407e32 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.506232] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Created folder: Instances in parent group-v870375. [ 869.506489] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 869.506697] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.506911] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91d770d1-79e0-4ba8-a0d8-889ac279845c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.524461] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.524461] env[65503]: value = "task-4450120" [ 869.524461] env[65503]: _type = "Task" [ 869.524461] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.532973] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450120, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.549951] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: d8d917c1-224c-4773-a911-d09f3f719e1b] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 869.565792] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529406cb-08df-0e51-4da8-7c29104859dd, 'name': SearchDatastore_Task, 'duration_secs': 0.014457} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.566118] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.566510] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 628e67fa-9a28-468f-85ad-990d3f1e5d8c/628e67fa-9a28-468f-85ad-990d3f1e5d8c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.566712] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-995f8904-29d7-4ab4-94e4-cab8be832682 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.574445] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 869.574445] env[65503]: value = "task-4450121" [ 869.574445] env[65503]: _type = "Task" [ 869.574445] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.585650] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450121, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.591022] env[65503]: DEBUG nova.scheduler.client.report [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 869.646520] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450117, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.036107] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450120, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.053517] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 174c806e-c2e8-4064-8800-d4a35c19f5e6] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 870.088505] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450121, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47885} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.088505] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 628e67fa-9a28-468f-85ad-990d3f1e5d8c/628e67fa-9a28-468f-85ad-990d3f1e5d8c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.088505] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.088505] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffc465a0-8951-454b-9b93-dc8d3470a395 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.094118] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.094723] env[65503]: DEBUG nova.compute.manager [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 870.099207] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.491s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.099461] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.103560] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.751s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.103560] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.104503] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.386s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.104686] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.106450] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.223s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.106625] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.108298] env[65503]: DEBUG 
oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.556s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.109842] env[65503]: INFO nova.compute.claims [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 870.112862] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 870.112862] env[65503]: value = "task-4450122" [ 870.112862] env[65503]: _type = "Task" [ 870.112862] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.131372] env[65503]: DEBUG oslo_vmware.api [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450117, 'name': PowerOnVM_Task, 'duration_secs': 0.553043} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.131931] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450122, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.132216] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.132412] env[65503]: INFO nova.compute.manager [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Took 7.14 seconds to spawn the instance on the hypervisor. 
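
Each 'Invoking <Something>_Task ... Waiting for the task ... progress is N% ... completed successfully' sequence above (Rename_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, CreateVM_Task) is the oslo.vmware pattern of starting an asynchronous vCenter task and polling it to completion. A minimal sketch, assuming an oslo.vmware VMwareAPISession with its invoke_api and wait_for_task methods; vm_ref is assumed to be a managed object reference obtained elsewhere:

    def power_on(session, vm_ref):
        # Start the asynchronous task on vCenter; this corresponds to the
        # 'Invoking VirtualMachine.PowerOnVM_Task' records above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Poll until vCenter reports the task finished, which is what the
        # 'Task: {... PowerOnVM_Task} progress is N%' records trace.
        return session.wait_for_task(task)

If vCenter reports the task as failed, wait_for_task surfaces that as an exception rather than a progress record, so only the 'completed successfully' path appears in these logs.
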
[ 870.132628] env[65503]: DEBUG nova.compute.manager [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 870.133509] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8d27b0-36da-4c96-845b-ddd03175184d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.141517] env[65503]: INFO nova.scheduler.client.report [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Deleted allocations for instance e74fe378-737a-4732-9a2d-b889a436b8a3 [ 870.152725] env[65503]: INFO nova.scheduler.client.report [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Deleted allocations for instance a197b590-1f74-4241-9579-2f2d3bb89a1d [ 870.178234] env[65503]: INFO nova.scheduler.client.report [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Deleted allocations for instance 45a4b511-aa6a-433d-b136-f53686db9575 [ 870.188216] env[65503]: INFO nova.scheduler.client.report [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Deleted allocations for instance e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf [ 870.328399] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "909b3535-9410-4820-a34d-6c0e9627f506" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.328664] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "909b3535-9410-4820-a34d-6c0e9627f506" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.536978] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450120, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.557051] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 85d0ed1d-6306-4999-832b-f4e69233fec7] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 870.617763] env[65503]: DEBUG nova.compute.utils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 870.619245] env[65503]: DEBUG nova.compute.manager [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 870.619451] env[65503]: DEBUG nova.network.neutron [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 870.619760] env[65503]: WARNING neutronclient.v2_0.client [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 870.620061] env[65503]: WARNING neutronclient.v2_0.client [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 870.620775] env[65503]: WARNING openstack [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 870.621214] env[65503]: WARNING openstack [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 870.634600] env[65503]: DEBUG nova.compute.manager [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 870.645026] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450122, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072384} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.645304] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.646262] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086fbf0c-8d25-4870-ad0d-00f6b7ae6c7e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.681913] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 628e67fa-9a28-468f-85ad-990d3f1e5d8c/628e67fa-9a28-468f-85ad-990d3f1e5d8c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.682724] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b1c4322d-f3be-4764-b048-2b05934f1f9b tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "a197b590-1f74-4241-9579-2f2d3bb89a1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.275s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.685639] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d3f9b460-8c90-433b-a68d-d749dcb9fe17 tempest-ServerShowV254Test-1465370972 tempest-ServerShowV254Test-1465370972-project-member] Lock "e74fe378-737a-4732-9a2d-b889a436b8a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.705s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.686617] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7555094f-8f7b-4958-8e1f-60c7a2312235 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.706084] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a9e3a317-7216-475d-ad14-0bdf4d7da8ff tempest-FloatingIPsAssociationTestJSON-263007202 tempest-FloatingIPsAssociationTestJSON-263007202-project-member] Lock "45a4b511-aa6a-433d-b136-f53686db9575" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.376s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.710505] env[65503]: DEBUG nova.policy [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Policy check for network:attach_external_network 
failed with credentials {'is_admin': False, 'user_id': 'fa5203efa0354baca5354d76cf3365c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf54348a3d0948cfa816cc3746e86806', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 870.712611] env[65503]: INFO nova.compute.manager [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Took 60.27 seconds to build instance. [ 870.716885] env[65503]: DEBUG oslo_concurrency.lockutils [None req-04cf3ce3-7e56-417d-9d71-414874d5ad54 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.624s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.724609] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 870.724609] env[65503]: value = "task-4450123" [ 870.724609] env[65503]: _type = "Task" [ 870.724609] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.736106] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450123, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.023612] env[65503]: INFO nova.compute.manager [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Rescuing [ 871.023768] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquiring lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.023939] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquired lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.024168] env[65503]: DEBUG nova.network.neutron [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 871.040103] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450120, 'name': CreateVM_Task, 'duration_secs': 1.333213} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.040407] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.040906] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.041201] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.041537] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 871.041810] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-675a8ce0-c211-4e80-91d9-89880904b110 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.048150] env[65503]: DEBUG oslo_vmware.api 
[None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 871.048150] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ab937a-6623-5807-0887-53e1333f5713" [ 871.048150] env[65503]: _type = "Task" [ 871.048150] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.059969] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 9297d849-a966-48da-ba6a-453c42b99e44] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 871.061938] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ab937a-6623-5807-0887-53e1333f5713, 'name': SearchDatastore_Task, 'duration_secs': 0.009976} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.062575] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.062742] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 871.063059] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.063247] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.063458] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 871.063827] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd45fbda-1827-4626-b2e7-29eff766be61 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.073781] env[65503]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 871.073934] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 871.074784] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66f14be1-8f70-4291-8877-e7e3bf60c67c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.083561] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 871.083561] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a713ee-7b36-1f86-bbaa-cba91f40f90f" [ 871.083561] env[65503]: _type = "Task" [ 871.083561] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.097102] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a713ee-7b36-1f86-bbaa-cba91f40f90f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.220614] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9072cf5e-16fe-4719-a7ff-ab15caae3963 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Lock "429b7542-c288-4a7a-9032-09881938b256" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.800s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.236881] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450123, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.238370] env[65503]: DEBUG nova.network.neutron [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Successfully created port: 19156670-d2c9-45a9-b9a1-2ab187cf5f4f {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 871.528638] env[65503]: WARNING neutronclient.v2_0.client [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
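
The repeated 'Invoking HostDatastoreBrowser.SearchDatastore_Task' calls above probe the image cache for an already-downloaded VMDK; on a hit, the cached disk is copied into the new instance's directory, which is what the CopyVirtualDisk_Task records that follow show. A minimal sketch of the probe, assuming the vSphere SearchDatastore_Task call made through an oslo.vmware session; the datastore name, ds_browser argument, and spec-less search are illustrative:

    def cached_vmdk_exists(session, ds_browser, image_id):
        # Ask the datastore browser to list the per-image cache folder,
        # matching the SearchDatastore_Task records above.
        path = '[datastore1] devstack-image-cache_base/%s' % image_id
        task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                                  ds_browser, datastorePath=path)
        # wait_for_task returns the task info; a missing folder surfaces as a
        # task fault (exception) instead, which this sketch does not handle.
        task_info = session.wait_for_task(task)
        files = getattr(task_info.result, 'file', []) or []
        return any(f.path == '%s.vmdk' % image_id for f in files)

On a hit the driver can skip fetching the image again and go straight to CopyVirtualDisk_Task, which matches the spawn sequences in these records, where the search is followed directly by the copy.
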
[ 871.529326] env[65503]: WARNING openstack [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 871.529978] env[65503]: WARNING openstack [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 871.563738] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 1c598208-a4d0-46b8-9a9c-107353e957b9] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 871.598271] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a713ee-7b36-1f86-bbaa-cba91f40f90f, 'name': SearchDatastore_Task, 'duration_secs': 0.011239} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.599717] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a71b456e-bafc-4bb3-9522-9b05af2eac3f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.610681] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 871.610681] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52460ca0-5a57-4177-461b-b1e228b330e8" [ 871.610681] env[65503]: _type = "Task" [ 871.610681] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.621639] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52460ca0-5a57-4177-461b-b1e228b330e8, 'name': SearchDatastore_Task, 'duration_secs': 0.010691} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.622755] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.623086] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 0001f4db-3073-411c-8d60-6d8528ef263a/0001f4db-3073-411c-8d60-6d8528ef263a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 871.623924] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7053f8-e8f0-44fe-99e3-87be58f9a2dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.627513] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c869decb-35e0-4cc6-a4c8-b2c1f855f7c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.638305] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad99a8b-3e3b-4252-9eab-8db1e114bc7f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.641603] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 871.641603] env[65503]: value = "task-4450124" [ 871.641603] env[65503]: _type = "Task" [ 871.641603] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.675927] env[65503]: DEBUG nova.compute.manager [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 871.680111] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc014d01-0b67-409f-b015-4e6f827a3f58 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.689915] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450124, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.696089] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f2d046-735f-4268-b6f3-b8d921a0663a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.713062] env[65503]: DEBUG nova.compute.provider_tree [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.725657] env[65503]: DEBUG nova.compute.manager [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 871.731442] env[65503]: DEBUG nova.virt.hardware [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 871.731442] env[65503]: DEBUG nova.virt.hardware [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 871.731442] env[65503]: DEBUG nova.virt.hardware [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 871.731442] env[65503]: DEBUG nova.virt.hardware [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 871.731442] env[65503]: DEBUG nova.virt.hardware [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 871.731442] env[65503]: DEBUG nova.virt.hardware [None req-ea94d55b-d070-4c87-a027-583ba4c7514b 
tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 871.731738] env[65503]: DEBUG nova.virt.hardware [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 871.731738] env[65503]: DEBUG nova.virt.hardware [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 871.731786] env[65503]: DEBUG nova.virt.hardware [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 871.731969] env[65503]: DEBUG nova.virt.hardware [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 871.732104] env[65503]: DEBUG nova.virt.hardware [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 871.733775] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705b395a-3093-4179-bf5d-ef9b9a48274e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.752039] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630bb186-930b-47fa-9e57-0156c2858003 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.756868] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450123, 'name': ReconfigVM_Task, 'duration_secs': 0.676123} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.757310] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 628e67fa-9a28-468f-85ad-990d3f1e5d8c/628e67fa-9a28-468f-85ad-990d3f1e5d8c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.758272] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f45a08d-dc27-4824-9514-11f2c26a2851 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.771438] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 871.771438] env[65503]: value = "task-4450125" [ 871.771438] env[65503]: _type = "Task" [ 871.771438] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.781216] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450125, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.070439] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: a1908e71-31f9-4308-b4d6-7908d3208c5a] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 872.154647] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450124, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517152} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.155285] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 0001f4db-3073-411c-8d60-6d8528ef263a/0001f4db-3073-411c-8d60-6d8528ef263a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 872.155285] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 872.155773] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-60e605d4-5b0b-412b-bf86-70997fda2412 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.165371] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 872.165371] env[65503]: value = "task-4450126" [ 872.165371] env[65503]: _type = "Task" [ 872.165371] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.179729] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450126, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.219058] env[65503]: DEBUG nova.scheduler.client.report [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 872.259315] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.284109] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450125, 'name': Rename_Task, 'duration_secs': 0.193613} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.284481] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.284797] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-481e160c-9ffe-4311-8223-f3fa7281fac4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.293770] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 872.293770] env[65503]: value = "task-4450127" [ 872.293770] env[65503]: _type = "Task" [ 872.293770] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.306070] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450127, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.391707] env[65503]: WARNING openstack [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 872.392131] env[65503]: WARNING openstack [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 872.574792] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 5e2cf383-312b-404f-acff-2ecb75678600] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 872.679764] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450126, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08069} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.680038] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 872.680893] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d631cb99-355a-4b77-9fd3-d7fb16bb7ba5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.701753] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 0001f4db-3073-411c-8d60-6d8528ef263a/0001f4db-3073-411c-8d60-6d8528ef263a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 872.702257] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56eb3eb0-de95-4340-96fa-2869ad6276e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.723334] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 872.723334] env[65503]: value = "task-4450128" [ 872.723334] env[65503]: _type = "Task" [ 872.723334] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.726267] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.618s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.726775] env[65503]: DEBUG nova.compute.manager [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 872.729607] env[65503]: DEBUG oslo_concurrency.lockutils [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.539s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.730079] env[65503]: DEBUG oslo_concurrency.lockutils [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.732247] env[65503]: DEBUG oslo_concurrency.lockutils [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.452s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.732247] env[65503]: DEBUG oslo_concurrency.lockutils [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.734319] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.492s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.735338] env[65503]: INFO nova.compute.claims [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 872.744690] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 
tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.768949] env[65503]: INFO nova.scheduler.client.report [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Deleted allocations for instance f840b178-fd54-4c84-808c-a14c99a5ecdd [ 872.771538] env[65503]: INFO nova.scheduler.client.report [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Deleted allocations for instance 62a18449-7cec-4785-a340-d0450adc8044 [ 872.811018] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450127, 'name': PowerOnVM_Task} progress is 78%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.819030] env[65503]: WARNING neutronclient.v2_0.client [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 872.820567] env[65503]: WARNING openstack [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 872.820567] env[65503]: WARNING openstack [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 872.909517] env[65503]: DEBUG nova.network.neutron [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Successfully updated port: 19156670-d2c9-45a9-b9a1-2ab187cf5f4f {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 873.007404] env[65503]: DEBUG nova.network.neutron [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updating instance_info_cache with network_info: [{"id": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "address": "fa:16:3e:05:3f:fe", "network": {"id": "ede30400-e312-446b-bf0d-2e535869001e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-613624937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cd4b8037fb204e7ebaf5f34c15096b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc56d3579-92", "ovs_interfaceid": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 873.078143] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: eec6a484-ab00-402e-a369-c3009065c553] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 873.234918] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.241028] env[65503]: DEBUG nova.compute.utils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 873.245576] env[65503]: DEBUG nova.compute.manager [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Not allocating networking since 'none' was specified. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 873.286298] env[65503]: DEBUG oslo_concurrency.lockutils [None req-24920011-db3b-4b31-8f89-56bc4aa4e19b tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "f840b178-fd54-4c84-808c-a14c99a5ecdd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.575s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.287700] env[65503]: DEBUG oslo_concurrency.lockutils [None req-41d710f4-229c-4545-9042-2c3f971920fe tempest-ListServersNegativeTestJSON-1298772713 tempest-ListServersNegativeTestJSON-1298772713-project-member] Lock "62a18449-7cec-4785-a340-d0450adc8044" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.476s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.309186] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450127, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.412509] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.412509] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.412509] env[65503]: DEBUG nova.network.neutron [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 873.502691] env[65503]: DEBUG nova.compute.manager [req-b4bdb828-4389-423c-9818-b033c066d7e2 req-7184044a-9aab-4068-9cfe-089d230a3acc service nova] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Received event network-vif-plugged-19156670-d2c9-45a9-b9a1-2ab187cf5f4f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 873.502965] env[65503]: DEBUG oslo_concurrency.lockutils [req-b4bdb828-4389-423c-9818-b033c066d7e2 req-7184044a-9aab-4068-9cfe-089d230a3acc service nova] Acquiring lock "708ed8ab-0ec9-457c-966d-b11c55895981-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.503177] env[65503]: DEBUG oslo_concurrency.lockutils [req-b4bdb828-4389-423c-9818-b033c066d7e2 req-7184044a-9aab-4068-9cfe-089d230a3acc service nova] Lock "708ed8ab-0ec9-457c-966d-b11c55895981-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.503375] env[65503]: DEBUG oslo_concurrency.lockutils [req-b4bdb828-4389-423c-9818-b033c066d7e2 req-7184044a-9aab-4068-9cfe-089d230a3acc service nova] Lock "708ed8ab-0ec9-457c-966d-b11c55895981-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.504061] env[65503]: DEBUG nova.compute.manager [req-b4bdb828-4389-423c-9818-b033c066d7e2 req-7184044a-9aab-4068-9cfe-089d230a3acc service nova] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] No waiting events found dispatching network-vif-plugged-19156670-d2c9-45a9-b9a1-2ab187cf5f4f {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 873.504061] env[65503]: WARNING nova.compute.manager [req-b4bdb828-4389-423c-9818-b033c066d7e2 req-7184044a-9aab-4068-9cfe-089d230a3acc service nova] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Received unexpected event network-vif-plugged-19156670-d2c9-45a9-b9a1-2ab187cf5f4f for instance with vm_state building and task_state spawning. 
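
The network-vif-plugged handling just above (pop_instance_event taking the per-instance "-events" lock, finding no waiting events, and warning about an unexpected event while the instance is still building) comes down to a waiter registry keyed by instance and event name: if the Neutron notification arrives before the driver has registered a waiter, the event is dropped with a warning rather than an error. A minimal sketch of that idea with hypothetical names (EventWaiter, prepare, pop); this is not Nova's actual InstanceEvents class:

    # Hedged sketch, not Nova code: why the "Received unexpected event
    # network-vif-plugged-..." WARNING above is benign.
    import threading

    class EventWaiter:
        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "-events" lock in the log
            self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            """Register interest before triggering the action that emits the event."""
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            """Called when the external event arrives; wakes the waiter if one exists."""
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                # No one registered yet: the port was plugged before the driver
                # started waiting for it, matching the WARNING in the log above.
                print(f"unexpected event {event_name} for {instance_uuid}")
                return False
            ev.set()
            return True
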
[ 873.510500] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Releasing lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.582181] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: db942a2d-671b-4036-a80b-d2375145cd29] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 873.734045] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450128, 'name': ReconfigVM_Task, 'duration_secs': 0.648057} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.734391] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 0001f4db-3073-411c-8d60-6d8528ef263a/0001f4db-3073-411c-8d60-6d8528ef263a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.735133] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a24b84f-0d8c-4027-a476-a6b8b7ad7b58 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.742159] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 873.742159] env[65503]: value = "task-4450129" [ 873.742159] env[65503]: _type = "Task" [ 873.742159] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.746116] env[65503]: DEBUG nova.compute.manager [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 873.758795] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450129, 'name': Rename_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.813140] env[65503]: DEBUG oslo_vmware.api [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450127, 'name': PowerOnVM_Task, 'duration_secs': 1.024043} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.813541] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.813614] env[65503]: INFO nova.compute.manager [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Took 8.66 seconds to spawn the instance on the hypervisor. [ 873.813809] env[65503]: DEBUG nova.compute.manager [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 873.815209] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2346b6-9934-4792-bc2a-e2b48e617464 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.918446] env[65503]: WARNING openstack [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 873.920844] env[65503]: WARNING openstack [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 874.086076] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 50f11559-b8c7-41a2-aa43-255a28ffa58c] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 874.108460] env[65503]: DEBUG nova.network.neutron [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 874.159385] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquiring lock "0ece7ad4-40cd-43e4-8cbc-dddd43f0645d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.160050] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "0ece7ad4-40cd-43e4-8cbc-dddd43f0645d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.264247] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450129, 'name': Rename_Task, 'duration_secs': 0.288673} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.264329] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 874.264622] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12fc28be-3846-4bf8-ac23-dc897c7cc7db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.274247] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 874.274247] env[65503]: value = "task-4450130" [ 874.274247] env[65503]: _type = "Task" [ 874.274247] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.280857] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb48dece-59a3-47d0-a6df-40db009142a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.293404] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450130, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.297584] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b5ae42-7c0b-48d9-98c8-c355228c4ac2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.303594] env[65503]: WARNING openstack [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 874.303946] env[65503]: WARNING openstack [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 874.345895] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08cf46d-179f-4bfa-adcb-973cd7eb7a84 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.353738] env[65503]: INFO nova.compute.manager [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Took 58.84 seconds to build instance. [ 874.358441] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e9ee3a-5cf4-4e40-ae30-3ec7ab94845b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.375161] env[65503]: DEBUG nova.compute.provider_tree [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 874.590750] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 2dbc3860-c65c-4cbb-8d90-f1f74420e652] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 874.765632] env[65503]: WARNING neutronclient.v2_0.client [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
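
The nova.virt.hardware trace for the m1.nano flavor (1 vCPU, limits sockets=65536, cores=65536, threads=65536) enumerates every (sockets, cores, threads) split whose product equals the vCPU count and reports the single result 1:1:1. A minimal sketch of that enumeration, assuming only the simplified rules visible in the log rather than Nova's actual hardware.py logic:

    # Hedged sketch (not Nova's hardware.py): enumerate CPU topologies whose
    # sockets * cores * threads equals the flavor vCPU count, within limits.
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    # For the m1.nano flavor traced here (vcpus=1) this yields exactly
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching the
    # "Got 1 possible topologies" line in the log.
    print(possible_topologies(1))
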
[ 874.769763] env[65503]: WARNING openstack [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 874.769763] env[65503]: WARNING openstack [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 874.778306] env[65503]: DEBUG nova.compute.manager [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 874.793506] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450130, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.810689] env[65503]: DEBUG nova.virt.hardware [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 874.810961] env[65503]: DEBUG nova.virt.hardware [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 874.811123] env[65503]: DEBUG nova.virt.hardware [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 874.811744] env[65503]: DEBUG nova.virt.hardware [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 
tempest-ServersListShow296Test-1604019957-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 874.811943] env[65503]: DEBUG nova.virt.hardware [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 874.812105] env[65503]: DEBUG nova.virt.hardware [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 874.812317] env[65503]: DEBUG nova.virt.hardware [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 874.812473] env[65503]: DEBUG nova.virt.hardware [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 874.812674] env[65503]: DEBUG nova.virt.hardware [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 874.812903] env[65503]: DEBUG nova.virt.hardware [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 874.813122] env[65503]: DEBUG nova.virt.hardware [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 874.814010] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61f1c9c-a544-4793-be3f-110fa9da2f07 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.826545] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a5dd69-218c-40eb-b508-e2c839d079f8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.843587] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 874.849616] env[65503]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Creating folder: Project (a59e4b891f5b4aa4b54a4fd991b3b840). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 874.855580] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-21edf931-33ad-4b86-b345-307ddd9972e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.858087] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ed1021b3-dc88-4a8c-a5b5-0aa57927b945 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.266s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.871076] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Created folder: Project (a59e4b891f5b4aa4b54a4fd991b3b840) in parent group-v870190. [ 874.871292] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Creating folder: Instances. Parent ref: group-v870378. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 874.872255] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22db520e-51b0-41ef-bf59-382707094ba1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.883904] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Created folder: Instances in parent group-v870378. [ 874.884193] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 874.884409] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 874.884628] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41c21776-88f7-45e0-bdfe-45d354688885 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.902397] env[65503]: ERROR nova.scheduler.client.report [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [req-e130c584-980a-427f-86a6-ba8b7a33c8f5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e130c584-980a-427f-86a6-ba8b7a33c8f5"}]} [ 874.906580] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 874.906580] env[65503]: value = "task-4450133" [ 874.906580] env[65503]: _type = "Task" [ 874.906580] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.916375] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450133, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.922701] env[65503]: DEBUG nova.scheduler.client.report [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 874.945487] env[65503]: DEBUG nova.scheduler.client.report [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 874.945711] env[65503]: DEBUG nova.compute.provider_tree [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 874.955973] env[65503]: DEBUG nova.network.neutron [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance_info_cache with network_info: [{"id": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "address": "fa:16:3e:9b:39:b6", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19156670-d2", "ovs_interfaceid": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:121}} [ 874.965275] env[65503]: DEBUG nova.scheduler.client.report [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 874.987419] env[65503]: DEBUG nova.scheduler.client.report [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 875.056792] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 875.057116] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f90ad083-0ace-4b76-abaa-59dd22487207 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.065823] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 875.065823] env[65503]: value = "task-4450134" [ 875.065823] env[65503]: _type = "Task" [ 875.065823] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.076682] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450134, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.100105] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 02b3b44e-96bb-47a0-8aa0-7026d987cad8] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 875.293908] env[65503]: DEBUG oslo_vmware.api [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450130, 'name': PowerOnVM_Task, 'duration_secs': 0.918126} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.293908] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 875.293908] env[65503]: INFO nova.compute.manager [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Took 5.88 seconds to spawn the instance on the hypervisor. [ 875.293908] env[65503]: DEBUG nova.compute.manager [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 875.294618] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68f7275-8f94-46da-9221-078c74e82cd2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.363504] env[65503]: DEBUG nova.compute.manager [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 875.418509] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450133, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.458451] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.458848] env[65503]: DEBUG nova.compute.manager [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Instance network_info: |[{"id": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "address": "fa:16:3e:9b:39:b6", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19156670-d2", "ovs_interfaceid": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 875.461863] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:39:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19156670-d2c9-45a9-b9a1-2ab187cf5f4f', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 875.469798] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 875.471322] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 875.471322] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b38f0a93-4de5-4dad-b572-a1651e347533 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.495836] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 875.495836] env[65503]: value = "task-4450135" [ 875.495836] env[65503]: _type = "Task" [ 875.495836] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.506354] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450135, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.535198] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413959fc-93da-4a73-b1b2-ce28ccad5140 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.544844] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0996f9-9c58-4531-b2d2-e6b0881e6bb5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.582890] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a4b2a0-3aea-441c-97d4-bcf586198aa3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.591987] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450134, 'name': PowerOffVM_Task, 'duration_secs': 0.248864} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.594455] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 875.595365] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17f1f27-580a-46c5-955a-293f19c35e84 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.598904] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6febb449-f711-4fda-ad5d-9b5961a826e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.604989] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: c703d7aa-62d6-422e-b4ae-ba6ba1e3bad3] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 875.615308] env[65503]: DEBUG nova.compute.provider_tree [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 875.631692] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498880d4-d6ed-401e-9bdb-5afeb8afeea5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.673063] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 875.673494] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd536a51-eb4e-43a5-8b48-8591fe246378 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.682271] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 875.682271] env[65503]: value = "task-4450136" [ 875.682271] env[65503]: _type = "Task" [ 875.682271] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.693712] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 875.694153] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 875.694509] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.694750] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.695118] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 875.695499] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39b2acb2-5a01-44c3-b063-6d896d06a4e2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.707968] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 875.707968] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 875.710200] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce33f9a5-fb5f-4f3d-92b0-d9ae47b1d6a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.721049] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 875.721049] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c2c67e-2154-8af7-08a5-e0bbf0dfa99c" [ 875.721049] env[65503]: _type = "Task" [ 875.721049] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.731128] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c2c67e-2154-8af7-08a5-e0bbf0dfa99c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.820830] env[65503]: INFO nova.compute.manager [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Took 47.75 seconds to build instance. [ 875.905703] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.920477] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450133, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.007300] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450135, 'name': CreateVM_Task, 'duration_secs': 0.431058} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.007590] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 876.008124] env[65503]: WARNING neutronclient.v2_0.client [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
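Annotation: the repeating "Waiting for the task: (returnval){ ... } to complete" / "Task: {...} progress is N%" / "completed successfully" triplets around CreateVM_Task above come from oslo.vmware's wait_for_task/_poll_task pair, which just polls the vCenter task until it reaches a terminal state. A minimal sketch of that polling pattern follows; FakeTask and its canned (state, progress) updates are hypothetical stand-ins for the real oslo.vmware session and TaskInfo lookups, not the library's API.

    import time

    # Hypothetical stand-in for a vSphere task handle. The real driver reads
    # TaskInfo from vCenter; here a canned sequence of (state, progress)
    # tuples stands in for those lookups.
    class FakeTask:
        def __init__(self, updates):
            self._updates = iter(updates)

        def info(self):
            return next(self._updates)

    def wait_for_task(task, interval=0.5):
        """Poll until the task reaches a terminal state, mirroring the
        'progress is 0%' ... 'progress is 99%' ... 'completed successfully'
        sequence recorded in the log."""
        while True:
            state, progress = task.info()
            print(f"progress is {progress}%")
            if state == "success":
                print("completed successfully")
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)

    wait_for_task(FakeTask([("running", 0), ("running", 99), ("success", 100)]))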
[ 876.008515] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.008661] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.008974] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 876.009264] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fe0e9cb-bc08-44d0-a2a3-26f9ae9e6f0b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.016679] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 876.016679] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210da51-2ef6-2f6d-217a-07d2872c7b37" [ 876.016679] env[65503]: _type = "Task" [ 876.016679] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.026813] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210da51-2ef6-2f6d-217a-07d2872c7b37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.034164] env[65503]: DEBUG nova.compute.manager [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Received event network-changed-19156670-d2c9-45a9-b9a1-2ab187cf5f4f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 876.034164] env[65503]: DEBUG nova.compute.manager [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Refreshing instance network info cache due to event network-changed-19156670-d2c9-45a9-b9a1-2ab187cf5f4f. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 876.034164] env[65503]: DEBUG oslo_concurrency.lockutils [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] Acquiring lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.034164] env[65503]: DEBUG oslo_concurrency.lockutils [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] Acquired lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.034164] env[65503]: DEBUG nova.network.neutron [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Refreshing network info cache for port 19156670-d2c9-45a9-b9a1-2ab187cf5f4f {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 876.118465] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: c6aecf44-9a23-47a2-b1aa-6530b4119b1d] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 876.157251] env[65503]: ERROR nova.scheduler.client.report [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [req-fd2edd35-e3ba-4cb9-bd84-bd2a76529b22] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fd2edd35-e3ba-4cb9-bd84-bd2a76529b22"}]} [ 876.180995] env[65503]: DEBUG nova.scheduler.client.report [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 876.215088] env[65503]: DEBUG nova.scheduler.client.report [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 876.215376] env[65503]: DEBUG nova.compute.provider_tree [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 876.238753] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c2c67e-2154-8af7-08a5-e0bbf0dfa99c, 'name': SearchDatastore_Task, 'duration_secs': 0.013814} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.238753] env[65503]: DEBUG nova.scheduler.client.report [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 876.238753] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06ed00f2-eb07-4706-8af3-0a47b83f13d0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.245690] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 876.245690] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5228d11d-5a5b-d2fa-014b-6969cb260900" [ 876.245690] env[65503]: _type = "Task" [ 876.245690] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.257460] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5228d11d-5a5b-d2fa-014b-6969cb260900, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.290769] env[65503]: DEBUG nova.scheduler.client.report [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 876.322792] env[65503]: DEBUG oslo_concurrency.lockutils [None req-523d0fff-795e-4a1e-8cff-b0452066b016 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "0001f4db-3073-411c-8d60-6d8528ef263a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.321s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.423034] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450133, 'name': CreateVM_Task, 'duration_secs': 1.310656} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.423239] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 876.423935] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.423935] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.424797] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 876.424797] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e0b6bcc-9639-43ed-9fd3-f6ad4fffe530 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.435146] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 876.435146] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520133b2-6dcc-c94f-a038-6bf1d6c3138b" [ 876.435146] env[65503]: _type = "Task" [ 876.435146] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.448445] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520133b2-6dcc-c94f-a038-6bf1d6c3138b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.466255] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.466255] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.538127] env[65503]: WARNING neutronclient.v2_0.client [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 876.539207] env[65503]: WARNING openstack [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 876.539707] env[65503]: WARNING openstack [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 876.547136] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210da51-2ef6-2f6d-217a-07d2872c7b37, 'name': SearchDatastore_Task, 'duration_secs': 0.012059} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.550795] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.551061] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 876.551280] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.622447] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.622447] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Cleaning up deleted instances with incomplete migration {{(pid=65503) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11954}} [ 876.672811] env[65503]: WARNING openstack [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 876.673235] env[65503]: WARNING openstack [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 876.752960] env[65503]: WARNING neutronclient.v2_0.client [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
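Annotation: the "Acquiring lock / Acquired lock / Releasing lock" lines keyed on "[datastoreN] devstack-image-cache_base/..." paths show oslo.concurrency's lockutils serializing builds that need the same cached image before any SearchDatastore_Task or CopyVirtualDisk_Task runs against it. A minimal sketch of that pattern, assuming oslo.concurrency is installed; the _image_cached/_copy_image_to_cache helpers and the in-memory _CACHE are hypothetical stand-ins, not Nova code.

    from oslo_concurrency import lockutils

    # Hypothetical in-memory stand-in for the datastore image cache; the real
    # cache lives on the datastore and is checked with SearchDatastore_Task.
    _CACHE = set()

    def _image_cached(image_id):
        return image_id in _CACHE

    def _copy_image_to_cache(image_id):
        _CACHE.add(image_id)

    def ensure_cached_image(image_id, datastore="datastore1"):
        # The lock name mirrors the "[datastore] devstack-image-cache_base/..."
        # key seen in the log; lockutils.lock() is a context manager that
        # serializes callers using the same name, which is why each cache check
        # is framed by "Acquiring lock ... / Acquired ... / Releasing ...".
        name = (f"[{datastore}] devstack-image-cache_base/"
                f"{image_id}/{image_id}.vmdk")
        with lockutils.lock(name):
            if not _image_cached(image_id):
                _copy_image_to_cache(image_id)

    ensure_cached_image("d68ffece-ab91-4610-b535-fa1fb25ade93")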
[ 876.753782] env[65503]: WARNING openstack [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 876.754159] env[65503]: WARNING openstack [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 876.770998] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5228d11d-5a5b-d2fa-014b-6969cb260900, 'name': SearchDatastore_Task, 'duration_secs': 0.012255} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.771731] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.772264] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 429b7542-c288-4a7a-9032-09881938b256/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. 
{{(pid=65503) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 876.772698] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.772996] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 876.773412] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc532909-a504-4162-a7e0-ac6bac51963c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.776467] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23db925a-2718-4ecf-88c6-651796873f8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.783881] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df839ba1-1c81-498a-a6e3-767d140dcf82 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.788844] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 876.788844] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 876.789568] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 876.789568] env[65503]: value = "task-4450137" [ 876.789568] env[65503]: _type = "Task" [ 876.789568] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.790173] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b1ff7f5-e4d8-4750-a6e4-f24adb5df10a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.800070] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3601fff4-027d-4453-98b7-322d341f1719 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.804456] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 876.804456] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52df2925-18d9-0e2e-97fb-a0eb11ba6a1c" [ 876.804456] env[65503]: _type = "Task" [ 876.804456] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.839675] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450137, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.850414] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0868eea9-8b48-4db2-a1d9-c2ee1f0cd6ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.862501] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52df2925-18d9-0e2e-97fb-a0eb11ba6a1c, 'name': SearchDatastore_Task, 'duration_secs': 0.018138} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.864113] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c36053c-8835-4ea9-a48b-febea4745f93 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.872695] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e618be3-24f8-4eb0-ad62-78b507a362e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.880457] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 876.880457] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521c7ac6-cf7d-bf26-f0df-70eaf4c79243" [ 876.880457] env[65503]: _type = "Task" [ 876.880457] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.900532] env[65503]: DEBUG nova.compute.provider_tree [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 876.901095] env[65503]: DEBUG nova.network.neutron [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updated VIF entry in instance network info cache for port 19156670-d2c9-45a9-b9a1-2ab187cf5f4f. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 876.904577] env[65503]: DEBUG nova.network.neutron [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance_info_cache with network_info: [{"id": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "address": "fa:16:3e:9b:39:b6", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19156670-d2", "ovs_interfaceid": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 876.909296] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521c7ac6-cf7d-bf26-f0df-70eaf4c79243, 'name': SearchDatastore_Task, 'duration_secs': 0.009998} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.909707] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.909831] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 708ed8ab-0ec9-457c-966d-b11c55895981/708ed8ab-0ec9-457c-966d-b11c55895981.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 876.910982] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c7291bb-131e-420d-aeb7-c645e85716dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.918714] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 876.918714] env[65503]: value = "task-4450138" [ 876.918714] env[65503]: _type = "Task" [ 876.918714] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.929440] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450138, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.950167] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520133b2-6dcc-c94f-a038-6bf1d6c3138b, 'name': SearchDatastore_Task, 'duration_secs': 0.010017} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.950632] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.950910] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 876.951259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.951417] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.951590] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 876.951983] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a004aeb-615d-4899-b6eb-ff7258aede70 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.973781] env[65503]: DEBUG nova.compute.manager [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 876.976776] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 876.976961] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 876.978124] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c48ef809-0a8e-46f0-8bee-042f1022f791 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.985755] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 876.985755] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a4465f-68cc-efea-c764-4ffbc80d5270" [ 876.985755] env[65503]: _type = "Task" [ 876.985755] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.995294] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a4465f-68cc-efea-c764-4ffbc80d5270, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.030536] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.030881] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.031217] env[65503]: INFO nova.compute.manager [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Shelving [ 877.127466] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 877.306670] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496237} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.307471] env[65503]: INFO nova.virt.vmwareapi.ds_util [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 429b7542-c288-4a7a-9032-09881938b256/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. [ 877.307895] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfd8e05-d605-430a-b516-daa01b2368af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.336129] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 429b7542-c288-4a7a-9032-09881938b256/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 877.336129] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-550b0d15-fed9-44c4-b040-65c645145184 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.358362] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 877.358362] env[65503]: value = "task-4450139" [ 877.358362] env[65503]: _type = "Task" [ 877.358362] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.368612] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450139, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.404379] env[65503]: DEBUG oslo_concurrency.lockutils [req-14a8b58c-1b7f-49fb-8084-4a5970b7a6ed req-951fc69d-7b01-42ca-9e12-7b5bb52a8395 service nova] Releasing lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.439127] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450138, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.461944] env[65503]: DEBUG nova.scheduler.client.report [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 877.462242] env[65503]: DEBUG nova.compute.provider_tree [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 93 to 94 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 877.462443] env[65503]: DEBUG nova.compute.provider_tree [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 877.503453] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a4465f-68cc-efea-c764-4ffbc80d5270, 'name': SearchDatastore_Task, 'duration_secs': 0.025279} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.505107] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fc2c180-13e9-4980-9678-db39fb381b9a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.508581] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.514187] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 877.514187] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52265075-2bd0-8065-b0e0-99e6e4b23157" [ 877.514187] env[65503]: _type = "Task" [ 877.514187] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.524846] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52265075-2bd0-8065-b0e0-99e6e4b23157, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.871811] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450139, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.930854] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450138, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.757485} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.931108] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 708ed8ab-0ec9-457c-966d-b11c55895981/708ed8ab-0ec9-457c-966d-b11c55895981.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 877.931313] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 877.931582] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ad5d95b-260b-46a8-805e-350b40745e98 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.940244] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 877.940244] env[65503]: value = "task-4450140" [ 877.940244] env[65503]: _type = "Task" [ 877.940244] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.954511] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450140, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.971046] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.237s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.971932] env[65503]: DEBUG nova.compute.manager [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 877.974996] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.163s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.976690] env[65503]: INFO nova.compute.claims [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 877.996733] env[65503]: INFO nova.compute.manager [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Rebuilding instance [ 878.028917] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52265075-2bd0-8065-b0e0-99e6e4b23157, 'name': SearchDatastore_Task, 'duration_secs': 0.033498} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.032880] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.033208] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] dcc876ae-075f-48d2-81a4-a1b780d6fdec/dcc876ae-075f-48d2-81a4-a1b780d6fdec.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 878.033939] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e0a2988-7bda-4999-bdf5-002eb839268b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.042294] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.042475] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 878.042475] env[65503]: value = "task-4450141" [ 878.042475] env[65503]: _type = "Task" [ 878.042475] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.043386] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98c273b1-1864-4fec-a616-51830d87d2d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.055191] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450141, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.058947] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 878.058947] env[65503]: value = "task-4450142" [ 878.058947] env[65503]: _type = "Task" [ 878.058947] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.058947] env[65503]: DEBUG nova.compute.manager [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 878.060066] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75410b1-052b-4eea-b8f7-6e21d160d4bc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.370591] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450139, 'name': ReconfigVM_Task, 'duration_secs': 0.800607} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.370961] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 429b7542-c288-4a7a-9032-09881938b256/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.372080] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d172b3c8-3cb9-4042-8a6c-b6e70a9b6dc6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.406099] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-962648cf-9552-4f98-89a2-6e388b690d36 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.424625] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 878.424625] env[65503]: value = "task-4450143" [ 878.424625] env[65503]: _type = "Task" [ 878.424625] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.435919] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450143, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.452147] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450140, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068776} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.452545] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 878.453509] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484c0650-6a42-41f9-8c90-4661ca7d1c33 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.480800] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 708ed8ab-0ec9-457c-966d-b11c55895981/708ed8ab-0ec9-457c-966d-b11c55895981.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.482717] env[65503]: DEBUG nova.compute.utils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 878.488079] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cc9a0f5-9dbe-4c62-8dd7-f961dd66581e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.504883] env[65503]: DEBUG nova.compute.manager [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 878.505179] env[65503]: DEBUG nova.network.neutron [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 878.505543] env[65503]: WARNING neutronclient.v2_0.client [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 878.505910] env[65503]: WARNING neutronclient.v2_0.client [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
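The recurring "Waiting for the task: (returnval){ ... } to complete." / "progress is N%." / "completed successfully." records in this stretch come from oslo.vmware's task polling (wait_for_task at oslo_vmware/api.py:397 and _poll_task at :434/:444, per the {{...}} source tags). The following is a minimal, simplified sketch of that poll-until-terminal-state loop, not the oslo.vmware implementation; get_task_info, the state names, and TaskFailed are hypothetical stand-ins used only for illustration.

import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state (illustrative only)."""


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it reaches a terminal state.

    get_task_info is assumed to return an object with 'state'
    ('queued' | 'running' | 'success' | 'error'), 'progress', and 'error'
    attributes, loosely modelled on vSphere's TaskInfo; this is a sketch,
    not the oslo.vmware API.
    """
    while True:
        info = get_task_info()
        if info.state in ('queued', 'running'):
            # Corresponds to the "... progress is N%." DEBUG lines above.
            print("progress is %d%%" % (info.progress or 0))
            time.sleep(poll_interval)
            continue
        if info.state == 'success':
            # Corresponds to "... completed successfully."
            return info
        raise TaskFailed(getattr(info, 'error', None) or 'task failed')

In the captured log the same pattern drives the CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, and session-scoped SearchDatastore_Task waits, with the SOAP calls themselves appearing as the "Invoking ... with opID=oslo.vmware-..." records.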
[ 878.509573] env[65503]: WARNING openstack [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 878.509573] env[65503]: WARNING openstack [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 878.523435] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 878.523435] env[65503]: value = "task-4450144" [ 878.523435] env[65503]: _type = "Task" [ 878.523435] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.535192] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450144, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.558414] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450141, 'name': CopyVirtualDisk_Task} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.558767] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] dcc876ae-075f-48d2-81a4-a1b780d6fdec/dcc876ae-075f-48d2-81a4-a1b780d6fdec.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 878.559082] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.559571] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ba9a712-4f5f-4256-b1c1-5c713d8d5ba5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.574742] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450142, 'name': PowerOffVM_Task, 'duration_secs': 0.455438} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.576494] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.576994] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 878.576994] env[65503]: value = "task-4450145" [ 878.576994] env[65503]: _type = "Task" [ 878.576994] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.580775] env[65503]: DEBUG nova.policy [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d16022d9f8b43ba8e97191fdc2b1a8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3658921b747e4d78a2046b838cb36d26', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 878.583424] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb5facd-9965-4524-a01d-e7053914819d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.613207] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450145, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.615740] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e2f33d-a9e3-489e-90be-829229fa11af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.939406] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450143, 'name': ReconfigVM_Task, 'duration_secs': 0.37602} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.939791] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 878.939968] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-26c74337-abde-4ab8-b084-71a64b2fcf0f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.943568] env[65503]: DEBUG nova.network.neutron [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Successfully created port: e2945b8a-8327-4ac8-8d42-fc828663c0e0 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 878.951798] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 878.951798] env[65503]: value = "task-4450146" [ 878.951798] env[65503]: _type = "Task" [ 878.951798] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.961137] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450146, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.004257] env[65503]: DEBUG nova.compute.manager [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 879.035264] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450144, 'name': ReconfigVM_Task, 'duration_secs': 0.331348} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.038089] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 708ed8ab-0ec9-457c-966d-b11c55895981/708ed8ab-0ec9-457c-966d-b11c55895981.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 879.038944] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8e722470-f745-4229-9ed9-3eaa2a0a3c88 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.048144] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 879.048144] env[65503]: value = "task-4450147" [ 879.048144] env[65503]: _type = "Task" [ 879.048144] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.061507] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450147, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.095865] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.099236] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-880273bf-19ff-48e3-a0a9-d1e709bc0cc7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.100984] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450145, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066919} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.101261] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.102526] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48679476-7e5d-4c60-860e-cc3c8c997d45 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.111066] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 879.111066] env[65503]: value = "task-4450148" [ 879.111066] env[65503]: _type = "Task" [ 879.111066] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.128045] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] dcc876ae-075f-48d2-81a4-a1b780d6fdec/dcc876ae-075f-48d2-81a4-a1b780d6fdec.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.135258] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 879.135926] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebb13cb0-4ba4-4f54-8290-4260f9282f4f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.151381] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8b089d1f-bd0c-4b86-8713-a3e1312cc983 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.160461] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450148, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.163113] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 879.163113] env[65503]: value = "task-4450149" [ 879.163113] env[65503]: _type = "Task" [ 879.163113] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.163515] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 879.163515] env[65503]: value = "task-4450150" [ 879.163515] env[65503]: _type = "Task" [ 879.163515] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.180067] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450150, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.183710] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450149, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.466693] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450146, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.506516] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9704e277-13a9-4267-a092-6c1aa7fde296 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.519471] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6702ded5-d001-4fea-8eed-cd407a4ee89a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.559011] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884446e5-13ae-4e7f-a591-50f3bcb142bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.573451] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0162b3c6-4325-4b44-ac3b-6b9aa07ce656 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.577981] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450147, 'name': Rename_Task, 'duration_secs': 0.167693} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.579083] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.579842] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79e759c8-9231-4415-9aa7-efeb5fc2aee7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.591192] env[65503]: DEBUG nova.compute.provider_tree [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.594114] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 879.594114] env[65503]: value = "task-4450151" [ 879.594114] env[65503]: _type = "Task" [ 879.594114] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.605592] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450151, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.631026] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450148, 'name': PowerOffVM_Task, 'duration_secs': 0.154029} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.631172] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.631831] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.632706] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959ba17b-7348-4ed9-87a4-c25f34d8091d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.641435] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.641785] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff31b186-0b83-48fb-9cde-753a56e2f383 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.675686] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.675897] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.676093] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Deleting the datastore file [datastore1] 0001f4db-3073-411c-8d60-6d8528ef263a {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.677123] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d68f3a03-f35d-413a-aabf-29bbcc491abd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.682824] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450149, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.686881] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450150, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.693115] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 879.693115] env[65503]: value = "task-4450153" [ 879.693115] env[65503]: _type = "Task" [ 879.693115] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.702277] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450153, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.967873] env[65503]: DEBUG oslo_vmware.api [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450146, 'name': PowerOnVM_Task, 'duration_secs': 0.53265} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.968198] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.971973] env[65503]: DEBUG nova.compute.manager [None req-e200605c-f304-4798-bd01-aaa251e19985 tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 879.973057] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6c4b45-2802-42d5-8b23-a0285c6c970c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.022048] env[65503]: DEBUG nova.compute.manager [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 880.051478] env[65503]: DEBUG nova.virt.hardware [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 880.051874] env[65503]: DEBUG nova.virt.hardware [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 880.052135] env[65503]: DEBUG nova.virt.hardware [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 880.052425] env[65503]: DEBUG nova.virt.hardware [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 880.052733] env[65503]: DEBUG nova.virt.hardware [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 880.052947] env[65503]: DEBUG nova.virt.hardware [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 880.053298] env[65503]: DEBUG nova.virt.hardware [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.053557] env[65503]: DEBUG nova.virt.hardware [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 880.053820] env[65503]: DEBUG nova.virt.hardware [None 
req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 880.054090] env[65503]: DEBUG nova.virt.hardware [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 880.054368] env[65503]: DEBUG nova.virt.hardware [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 880.055682] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ffd3b5-a49c-447a-8b4a-73f0c8d02cba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.066978] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d77e5f-6adb-4e16-b040-761114ef5c75 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.096189] env[65503]: DEBUG nova.scheduler.client.report [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 880.111081] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450151, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.181140] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450150, 'name': ReconfigVM_Task, 'duration_secs': 0.691236} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.184742] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Reconfigured VM instance instance-00000044 to attach disk [datastore2] dcc876ae-075f-48d2-81a4-a1b780d6fdec/dcc876ae-075f-48d2-81a4-a1b780d6fdec.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.185603] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450149, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.185759] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d5486cda-266e-434c-b634-74fce6f004aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.195799] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 880.195799] env[65503]: value = "task-4450154" [ 880.195799] env[65503]: _type = "Task" [ 880.195799] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.209782] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450154, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.213536] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450153, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106093} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.213866] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.214158] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.214303] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.505256] env[65503]: DEBUG nova.network.neutron [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Successfully updated port: e2945b8a-8327-4ac8-8d42-fc828663c0e0 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 880.542888] env[65503]: DEBUG nova.compute.manager [req-6a5826f0-a486-4369-ab79-dfbeb982057d req-b16efb78-90b4-4b6d-8515-088ef4073c14 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Received event network-vif-plugged-e2945b8a-8327-4ac8-8d42-fc828663c0e0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 880.543143] env[65503]: DEBUG oslo_concurrency.lockutils [req-6a5826f0-a486-4369-ab79-dfbeb982057d req-b16efb78-90b4-4b6d-8515-088ef4073c14 service nova] Acquiring lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.543355] env[65503]: DEBUG oslo_concurrency.lockutils [req-6a5826f0-a486-4369-ab79-dfbeb982057d req-b16efb78-90b4-4b6d-8515-088ef4073c14 service nova] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.543854] env[65503]: DEBUG oslo_concurrency.lockutils [req-6a5826f0-a486-4369-ab79-dfbeb982057d req-b16efb78-90b4-4b6d-8515-088ef4073c14 service nova] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.544081] env[65503]: DEBUG nova.compute.manager [req-6a5826f0-a486-4369-ab79-dfbeb982057d req-b16efb78-90b4-4b6d-8515-088ef4073c14 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] No waiting events found dispatching network-vif-plugged-e2945b8a-8327-4ac8-8d42-fc828663c0e0 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 880.544308] env[65503]: WARNING nova.compute.manager 
[req-6a5826f0-a486-4369-ab79-dfbeb982057d req-b16efb78-90b4-4b6d-8515-088ef4073c14 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Received unexpected event network-vif-plugged-e2945b8a-8327-4ac8-8d42-fc828663c0e0 for instance with vm_state building and task_state spawning. [ 880.606672] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.632s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.608616] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.948s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.611026] env[65503]: INFO nova.compute.claims [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.619067] env[65503]: DEBUG oslo_vmware.api [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450151, 'name': PowerOnVM_Task, 'duration_secs': 0.555798} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.619252] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 880.619497] env[65503]: INFO nova.compute.manager [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Took 8.94 seconds to spawn the instance on the hypervisor. [ 880.619599] env[65503]: DEBUG nova.compute.manager [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 880.620410] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a985026-abee-4f7f-9226-dbf8ef277e0a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.678909] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450149, 'name': CreateSnapshot_Task, 'duration_secs': 1.112912} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.679394] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 880.679992] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4cd8314-ecf2-4fb5-a498-0376228ff4c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.711182] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450154, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.011803] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.012010] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.012208] env[65503]: DEBUG nova.network.neutron [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 881.117325] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquiring lock "715a3a0b-0773-44d5-8e9f-359fdbc2416c" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.117564] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "715a3a0b-0773-44d5-8e9f-359fdbc2416c" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.139651] env[65503]: INFO nova.compute.manager [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Took 50.08 seconds to build instance. 
[ 881.199349] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 881.199462] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bc3345c4-7ad5-4cd4-9c7e-340214d49f0b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.211985] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450154, 'name': Rename_Task, 'duration_secs': 0.728972} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.213442] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 881.214045] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 881.214045] env[65503]: value = "task-4450155" [ 881.214045] env[65503]: _type = "Task" [ 881.214045] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.214045] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8094d79-a0f4-4698-9928-72b70f0b0452 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.228817] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450155, 'name': CloneVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.230618] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 881.230618] env[65503]: value = "task-4450156" [ 881.230618] env[65503]: _type = "Task" [ 881.230618] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.239961] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450156, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.253969] env[65503]: DEBUG nova.virt.hardware [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 881.254149] env[65503]: DEBUG nova.virt.hardware [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 881.254354] env[65503]: DEBUG nova.virt.hardware [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 881.254550] env[65503]: DEBUG nova.virt.hardware [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 881.254722] env[65503]: DEBUG nova.virt.hardware [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 881.254827] env[65503]: DEBUG nova.virt.hardware [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 881.255048] env[65503]: DEBUG nova.virt.hardware [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 881.255204] env[65503]: DEBUG nova.virt.hardware [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 881.255401] env[65503]: DEBUG nova.virt.hardware [None 
req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 881.255583] env[65503]: DEBUG nova.virt.hardware [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 881.255981] env[65503]: DEBUG nova.virt.hardware [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 881.256695] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77529de-5a16-491d-bc9f-2282aa3c5686 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.266272] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b789a369-26af-42cc-a347-6f358777b893 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.271394] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Acquiring lock "38e9a714-87f8-422c-9cc5-09b6aec76198" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.272049] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Lock "38e9a714-87f8-422c-9cc5-09b6aec76198" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.272049] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Acquiring lock "38e9a714-87f8-422c-9cc5-09b6aec76198-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.272197] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Lock "38e9a714-87f8-422c-9cc5-09b6aec76198-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.272338] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 
tempest-ServersTestBootFromVolume-329786802-project-member] Lock "38e9a714-87f8-422c-9cc5-09b6aec76198-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.274639] env[65503]: INFO nova.compute.manager [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Terminating instance [ 881.289429] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.296726] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 881.297901] env[65503]: DEBUG nova.compute.manager [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 881.298170] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.298776] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 881.298966] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc260c6e-2832-404e-ba41-543005f1ea67 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.300808] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-957e1599-dfed-4a7a-af93-73d38b0d8c91 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.321679] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Waiting for the task: (returnval){ [ 881.321679] env[65503]: value = "task-4450157" [ 881.321679] env[65503]: _type = "Task" [ 881.321679] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.322699] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 881.322699] env[65503]: value = "task-4450158" [ 881.322699] env[65503]: _type = "Task" [ 881.322699] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.337283] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450158, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.340772] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4450157, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.515602] env[65503]: WARNING openstack [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 881.516154] env[65503]: WARNING openstack [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 881.555643] env[65503]: DEBUG nova.network.neutron [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 881.625400] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "715a3a0b-0773-44d5-8e9f-359fdbc2416c" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.508s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.627030] env[65503]: DEBUG nova.compute.manager [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 881.645447] env[65503]: WARNING openstack [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 881.646024] env[65503]: WARNING openstack [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 881.656101] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea94d55b-d070-4c87-a027-583ba4c7514b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "708ed8ab-0ec9-457c-966d-b11c55895981" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.114s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.729132] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450155, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.730073] env[65503]: WARNING neutronclient.v2_0.client [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 881.731030] env[65503]: WARNING openstack [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 881.731710] env[65503]: WARNING openstack [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 881.753633] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450156, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.823270] env[65503]: DEBUG nova.network.neutron [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance_info_cache with network_info: [{"id": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "address": "fa:16:3e:c9:1b:64", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2945b8a-83", "ovs_interfaceid": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 881.846916] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4450157, 'name': PowerOffVM_Task, 'duration_secs': 0.335855} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.846916] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450158, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.846916] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.847225] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Volume detach. 
Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 881.847311] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870259', 'volume_id': '760148f4-1011-4972-a0ed-b18b4fd3f079', 'name': 'volume-760148f4-1011-4972-a0ed-b18b4fd3f079', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '38e9a714-87f8-422c-9cc5-09b6aec76198', 'attached_at': '', 'detached_at': '', 'volume_id': '760148f4-1011-4972-a0ed-b18b4fd3f079', 'serial': '760148f4-1011-4972-a0ed-b18b4fd3f079'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 881.848216] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c02bea7-b2ec-4e11-a5f3-76b3cc196bc5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.874355] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57378ef7-36aa-4f4d-9308-04b65967677f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.882778] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b7929a-a035-4e06-9c9b-f5e79bd66206 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.906108] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d825d1da-48a4-4e6a-ac7a-6d9c4be00cbf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.923750] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] The volume has not been displaced from its original location: [datastore2] volume-760148f4-1011-4972-a0ed-b18b4fd3f079/volume-760148f4-1011-4972-a0ed-b18b4fd3f079.vmdk. No consolidation needed. 
{{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 881.929217] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Reconfiguring VM instance instance-00000020 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 881.932215] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ddc639d-7229-4b71-8edf-eca744fa25e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.952193] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Waiting for the task: (returnval){ [ 881.952193] env[65503]: value = "task-4450159" [ 881.952193] env[65503]: _type = "Task" [ 881.952193] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.965024] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4450159, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.135477] env[65503]: DEBUG nova.compute.utils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 882.136740] env[65503]: DEBUG nova.compute.manager [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 882.136933] env[65503]: DEBUG nova.network.neutron [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 882.137266] env[65503]: WARNING neutronclient.v2_0.client [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 882.139018] env[65503]: WARNING neutronclient.v2_0.client [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 882.139018] env[65503]: WARNING openstack [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 882.139018] env[65503]: WARNING openstack [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 882.194292] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d58067-0849-41af-a7a7-10aaa5c8a092 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.206792] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8080685-705d-4fc6-9fae-e4751f9ad66f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.244212] env[65503]: DEBUG nova.policy [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '459f0c39eef04215822e2ff3c777c709', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b01ea2e165f5478ebdb544d083648600', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 882.249749] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8232301-f758-4b97-bb48-dba9abe0bc6d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.272735] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50205c35-a776-4f1a-98e1-845184e75e56 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.278876] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450155, 'name': CloneVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.279254] env[65503]: DEBUG oslo_vmware.api [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450156, 'name': PowerOnVM_Task, 'duration_secs': 0.851742} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.279691] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 882.279977] env[65503]: INFO nova.compute.manager [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Took 7.50 seconds to spawn the instance on the hypervisor. [ 882.280238] env[65503]: DEBUG nova.compute.manager [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 882.282036] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e640550-c9ed-43e5-ab1e-5dae2374cda3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.294615] env[65503]: DEBUG nova.compute.provider_tree [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.329991] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.330421] env[65503]: DEBUG nova.compute.manager [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Instance network_info: |[{"id": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "address": "fa:16:3e:c9:1b:64", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2945b8a-83", "ovs_interfaceid": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 882.331070] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:1b:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6966f473-59ac-49bb-9b7a-22c61f4e61e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e2945b8a-8327-4ac8-8d42-fc828663c0e0', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.341030] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating folder: Project (3658921b747e4d78a2046b838cb36d26). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 882.344157] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10dce762-cef0-418d-85ed-812ab64900e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.352461] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450158, 'name': CreateVM_Task, 'duration_secs': 0.598513} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.353577] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 882.354047] env[65503]: DEBUG oslo_concurrency.lockutils [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.354047] env[65503]: DEBUG oslo_concurrency.lockutils [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.354418] env[65503]: DEBUG oslo_concurrency.lockutils [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 882.354828] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-276faabb-d711-43a4-ac5f-491cc69c6b29 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.359422] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 
tempest-ServerActionsTestOtherB-577581098-project-member] Created folder: Project (3658921b747e4d78a2046b838cb36d26) in parent group-v870190. [ 882.359644] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating folder: Instances. Parent ref: group-v870385. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 882.359921] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b5f2bb3-fc89-44a7-ad17-ab9e91f31583 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.364332] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 882.364332] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524fc2a5-91ed-7fb7-99dd-dd566be9ff34" [ 882.364332] env[65503]: _type = "Task" [ 882.364332] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.377489] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524fc2a5-91ed-7fb7-99dd-dd566be9ff34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.379436] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Created folder: Instances in parent group-v870385. [ 882.379436] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 882.379584] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 882.379810] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4379f857-bf3d-4974-a883-93d5664f9569 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.402413] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.402413] env[65503]: value = "task-4450162" [ 882.402413] env[65503]: _type = "Task" [ 882.402413] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.412038] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450162, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.464983] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4450159, 'name': ReconfigVM_Task, 'duration_secs': 0.246347} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.465301] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Reconfigured VM instance instance-00000020 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 882.470148] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fef29387-6962-4d14-a004-68e42155ad9f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.489532] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Waiting for the task: (returnval){ [ 882.489532] env[65503]: value = "task-4450163" [ 882.489532] env[65503]: _type = "Task" [ 882.489532] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.499971] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4450163, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.615671] env[65503]: DEBUG nova.network.neutron [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Successfully created port: 3e858b14-8f22-4201-9e89-d017fcba1f2d {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 882.652464] env[65503]: DEBUG nova.compute.manager [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 882.752537] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450155, 'name': CloneVM_Task, 'duration_secs': 1.154352} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.752863] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Created linked-clone VM from snapshot [ 882.753927] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1bbe26-b9c7-431a-8bf8-44937dc917b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.764338] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Uploading image 9eb4755c-7e54-48f4-839b-82094d800434 {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 882.796752] env[65503]: DEBUG oslo_vmware.rw_handles [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 882.796752] env[65503]: value = "vm-870383" [ 882.796752] env[65503]: _type = "VirtualMachine" [ 882.796752] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 882.797865] env[65503]: DEBUG nova.scheduler.client.report [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.803027] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-aa198f35-0546-40ad-8d0c-069a40d627af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.816936] env[65503]: INFO nova.compute.manager [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Took 43.29 seconds to build instance. [ 882.821706] env[65503]: DEBUG oslo_vmware.rw_handles [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lease: (returnval){ [ 882.821706] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5255c429-7291-1968-7bba-4ebf319ee404" [ 882.821706] env[65503]: _type = "HttpNfcLease" [ 882.821706] env[65503]: } obtained for exporting VM: (result){ [ 882.821706] env[65503]: value = "vm-870383" [ 882.821706] env[65503]: _type = "VirtualMachine" [ 882.821706] env[65503]: }. 
{{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 882.822116] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the lease: (returnval){ [ 882.822116] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5255c429-7291-1968-7bba-4ebf319ee404" [ 882.822116] env[65503]: _type = "HttpNfcLease" [ 882.822116] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 882.831379] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 882.831379] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5255c429-7291-1968-7bba-4ebf319ee404" [ 882.831379] env[65503]: _type = "HttpNfcLease" [ 882.831379] env[65503]: } is initializing. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 882.877678] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524fc2a5-91ed-7fb7-99dd-dd566be9ff34, 'name': SearchDatastore_Task, 'duration_secs': 0.01383} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.878098] env[65503]: DEBUG oslo_concurrency.lockutils [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.878380] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.878667] env[65503]: DEBUG oslo_concurrency.lockutils [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.878852] env[65503]: DEBUG oslo_concurrency.lockutils [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.879128] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
882.879462] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64c6ff88-9deb-4d5c-9d3e-a6c63b902a07 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.891102] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.891248] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.892093] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1149898b-501c-4188-a943-e2e252d16af5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.899234] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 882.899234] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5238a331-ae8c-42d4-b69f-ec45f74d8041" [ 882.899234] env[65503]: _type = "Task" [ 882.899234] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.915254] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5238a331-ae8c-42d4-b69f-ec45f74d8041, 'name': SearchDatastore_Task, 'duration_secs': 0.013149} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.920750] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450162, 'name': CreateVM_Task, 'duration_secs': 0.493502} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.921312] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-caaef841-45f2-4dfe-91e7-b2c8d4d149ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.923664] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 882.924276] env[65503]: WARNING neutronclient.v2_0.client [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 882.924656] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.924811] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.925241] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 882.925930] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fad4ad12-d012-478d-af73-c352ab891e38 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.929531] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 882.929531] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ad4971-03fa-047a-d76f-aa1f02f3c137" [ 882.929531] env[65503]: _type = "Task" [ 882.929531] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.934908] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 882.934908] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b3a54f-f894-e2aa-380c-368e5e050ffd" [ 882.934908] env[65503]: _type = "Task" [ 882.934908] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.942323] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ad4971-03fa-047a-d76f-aa1f02f3c137, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.948771] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b3a54f-f894-e2aa-380c-368e5e050ffd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.000670] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4450163, 'name': ReconfigVM_Task, 'duration_secs': 0.219212} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.001111] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870259', 'volume_id': '760148f4-1011-4972-a0ed-b18b4fd3f079', 'name': 'volume-760148f4-1011-4972-a0ed-b18b4fd3f079', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '38e9a714-87f8-422c-9cc5-09b6aec76198', 'attached_at': '', 'detached_at': '', 'volume_id': '760148f4-1011-4972-a0ed-b18b4fd3f079', 'serial': '760148f4-1011-4972-a0ed-b18b4fd3f079'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 883.001460] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.002339] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fc12a7-d25c-428c-a556-0f5f344c39bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.010130] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 883.010418] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f587f9f9-3a2b-4ea6-8796-16663a840973 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.107692] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 883.108125] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 883.108429] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 
tempest-ServersTestBootFromVolume-329786802-project-member] Deleting the datastore file [datastore2] 38e9a714-87f8-422c-9cc5-09b6aec76198 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 883.108823] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e782349-95b0-4acc-ac6b-2763f9edc6ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.119624] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Waiting for the task: (returnval){ [ 883.119624] env[65503]: value = "task-4450166" [ 883.119624] env[65503]: _type = "Task" [ 883.119624] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.128893] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4450166, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.309061] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.701s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.309849] env[65503]: DEBUG nova.compute.manager [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 883.314226] env[65503]: DEBUG oslo_concurrency.lockutils [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.163s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.314593] env[65503]: DEBUG nova.objects.instance [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Lazy-loading 'resources' on Instance uuid f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.319223] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f1346bf5-c7e6-441c-8447-0b561a614cf5 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "dcc876ae-075f-48d2-81a4-a1b780d6fdec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.154s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.332268] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 883.332268] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5255c429-7291-1968-7bba-4ebf319ee404" [ 883.332268] env[65503]: _type = "HttpNfcLease" [ 883.332268] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 883.332513] env[65503]: DEBUG oslo_vmware.rw_handles [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 883.332513] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5255c429-7291-1968-7bba-4ebf319ee404" [ 883.332513] env[65503]: _type = "HttpNfcLease" [ 883.332513] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 883.333430] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821d98c8-f60c-4b6e-b829-519ba8a4934d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.343648] env[65503]: DEBUG oslo_vmware.rw_handles [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d7c7b6-69b3-95f5-0d72-c3af7b1115eb/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 883.343879] env[65503]: DEBUG oslo_vmware.rw_handles [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d7c7b6-69b3-95f5-0d72-c3af7b1115eb/disk-0.vmdk for reading. 
{{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 883.443946] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ad4971-03fa-047a-d76f-aa1f02f3c137, 'name': SearchDatastore_Task, 'duration_secs': 0.013138} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.444670] env[65503]: DEBUG oslo_concurrency.lockutils [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.445014] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 0001f4db-3073-411c-8d60-6d8528ef263a/0001f4db-3073-411c-8d60-6d8528ef263a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 883.445310] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f790b868-a6b9-4fa5-8a9f-fe8de96653f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.452038] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b3a54f-f894-e2aa-380c-368e5e050ffd, 'name': SearchDatastore_Task, 'duration_secs': 0.023303} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.452575] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.452854] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 883.453222] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.453461] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.453756] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.455031] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95ef8367-704e-460b-82dd-a7c636bd9f96 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.457967] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 883.457967] env[65503]: value = "task-4450167" [ 883.457967] env[65503]: _type = "Task" [ 883.457967] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.464989] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.465195] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 883.469163] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8b5f60e-b88d-49f0-bfb3-311d534520a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.471802] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450167, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.477201] env[65503]: DEBUG nova.compute.manager [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Received event network-changed-e2945b8a-8327-4ac8-8d42-fc828663c0e0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 883.477521] env[65503]: DEBUG nova.compute.manager [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Refreshing instance network info cache due to event network-changed-e2945b8a-8327-4ac8-8d42-fc828663c0e0. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 883.477714] env[65503]: DEBUG oslo_concurrency.lockutils [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Acquiring lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.477896] env[65503]: DEBUG oslo_concurrency.lockutils [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Acquired lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.478122] env[65503]: DEBUG nova.network.neutron [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Refreshing network info cache for port e2945b8a-8327-4ac8-8d42-fc828663c0e0 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 883.482029] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9c423f39-e32c-4fe3-a87b-1bc9d0443428 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.485796] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 883.485796] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]523cdd9c-8c96-14e6-40d2-aa1abbaeaa0d" [ 883.485796] env[65503]: _type = "Task" [ 883.485796] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.497603] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523cdd9c-8c96-14e6-40d2-aa1abbaeaa0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.631159] env[65503]: DEBUG oslo_vmware.api [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Task: {'id': task-4450166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140637} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.631513] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 883.631605] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 883.631754] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 883.631919] env[65503]: INFO nova.compute.manager [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Took 2.33 seconds to destroy the instance on the hypervisor. [ 883.632204] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 883.632396] env[65503]: DEBUG nova.compute.manager [-] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 883.632510] env[65503]: DEBUG nova.network.neutron [-] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 883.632906] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 883.637695] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 883.638814] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 883.664045] env[65503]: DEBUG nova.compute.manager [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 883.695967] env[65503]: DEBUG nova.virt.hardware [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 883.696234] env[65503]: DEBUG nova.virt.hardware [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 883.696386] env[65503]: DEBUG nova.virt.hardware [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 883.696565] env[65503]: DEBUG nova.virt.hardware [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 883.696738] env[65503]: DEBUG nova.virt.hardware [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 883.696894] env[65503]: DEBUG nova.virt.hardware [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 883.697116] env[65503]: DEBUG nova.virt.hardware [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 883.697273] env[65503]: DEBUG nova.virt.hardware [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 883.697436] env[65503]: DEBUG nova.virt.hardware [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 883.697592] env[65503]: DEBUG nova.virt.hardware [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 883.697759] env[65503]: DEBUG nova.virt.hardware [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 883.698687] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379fffa6-03fd-4866-bf1b-b76802bd5af4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.708984] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67001e8a-a1c8-4c7b-806d-e49174700560 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.714117] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 883.822083] env[65503]: DEBUG nova.compute.utils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 883.824048] env[65503]: DEBUG nova.compute.manager [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 883.824259] env[65503]: DEBUG nova.network.neutron [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 883.824592] env[65503]: WARNING neutronclient.v2_0.client [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 883.825401] env[65503]: WARNING neutronclient.v2_0.client [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 883.826036] env[65503]: WARNING openstack [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 883.826435] env[65503]: WARNING openstack [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 883.977226] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450167, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.986269] env[65503]: WARNING neutronclient.v2_0.client [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 883.987194] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 883.987800] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 884.012354] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523cdd9c-8c96-14e6-40d2-aa1abbaeaa0d, 'name': SearchDatastore_Task, 'duration_secs': 0.012772} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.013740] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40a813c1-5da8-4ae3-9518-a335168b36a6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.025680] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 884.025680] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5288d04c-7347-8f8b-3d4b-6856f8e575d2" [ 884.025680] env[65503]: _type = "Task" [ 884.025680] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.036279] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5288d04c-7347-8f8b-3d4b-6856f8e575d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.308310] env[65503]: DEBUG nova.network.neutron [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Successfully updated port: 3e858b14-8f22-4201-9e89-d017fcba1f2d {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 884.335109] env[65503]: DEBUG nova.compute.manager [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 884.371981] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7dbd53d-1d84-4156-aaa9-458db84be11a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.385261] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6953519e-fd46-4458-aab9-2441bd871c5e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.395489] env[65503]: DEBUG nova.policy [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9337f2cb77a24772aba3ef0eb341f2d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd6e7f469b7d408fae0621171c096f0a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 884.438255] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34995a5-d135-475a-979c-a56d2af9e423 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.450369] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d44304-fb8e-40cd-ba06-e634808176c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.478642] env[65503]: DEBUG nova.compute.provider_tree [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.486346] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450167, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.739636} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.486346] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 0001f4db-3073-411c-8d60-6d8528ef263a/0001f4db-3073-411c-8d60-6d8528ef263a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 884.486346] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.486346] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eda0a087-357b-4afa-9295-c55d4995e19f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.495965] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 884.495965] env[65503]: value = "task-4450168" [ 884.495965] env[65503]: _type = "Task" [ 884.495965] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.510660] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450168, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.540056] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5288d04c-7347-8f8b-3d4b-6856f8e575d2, 'name': SearchDatastore_Task, 'duration_secs': 0.056933} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.540707] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.541076] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 2effe3e4-ea22-4d9f-8f5c-38ee661611e3/2effe3e4-ea22-4d9f-8f5c-38ee661611e3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 884.541431] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c361fc0-1c7f-47f5-934b-53240190c4f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.552945] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 884.552945] env[65503]: value = "task-4450169" [ 884.552945] env[65503]: _type = "Task" [ 884.552945] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.562636] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450169, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.684955] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 884.688785] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 884.762261] env[65503]: DEBUG nova.network.neutron [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Successfully created port: a95339cb-0433-44f3-992a-1680008ef082 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 884.816757] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquiring lock "refresh_cache-f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.817341] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquired lock "refresh_cache-f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.817617] env[65503]: DEBUG nova.network.neutron [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 884.818887] env[65503]: DEBUG nova.network.neutron [-] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 884.827346] env[65503]: WARNING neutronclient.v2_0.client [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 884.828036] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 884.828479] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 884.979576] env[65503]: DEBUG nova.scheduler.client.report [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.016983] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.112668} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.017563] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.019125] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fe4d31-1d08-4123-8b98-4b61263f5ed9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.057619] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 0001f4db-3073-411c-8d60-6d8528ef263a/0001f4db-3073-411c-8d60-6d8528ef263a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.058137] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43cbe34e-edd2-42e3-b284-98f7fa88966b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.086742] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450169, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.088678] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 885.088678] env[65503]: value = "task-4450170" [ 885.088678] env[65503]: _type = "Task" [ 885.088678] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.099346] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450170, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.324834] env[65503]: WARNING openstack [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 885.325522] env[65503]: WARNING openstack [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 885.334280] env[65503]: INFO nova.compute.manager [-] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Took 1.70 seconds to deallocate network for instance. [ 885.351496] env[65503]: DEBUG nova.compute.manager [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 885.379765] env[65503]: DEBUG nova.virt.hardware [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 885.380036] env[65503]: DEBUG nova.virt.hardware [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 885.380153] env[65503]: DEBUG nova.virt.hardware [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 885.380335] env[65503]: DEBUG nova.virt.hardware [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Flavor pref 0:0:0 
{{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 885.380473] env[65503]: DEBUG nova.virt.hardware [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 885.380620] env[65503]: DEBUG nova.virt.hardware [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 885.380810] env[65503]: DEBUG nova.virt.hardware [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 885.380961] env[65503]: DEBUG nova.virt.hardware [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 885.381128] env[65503]: DEBUG nova.virt.hardware [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 885.381292] env[65503]: DEBUG nova.virt.hardware [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 885.381453] env[65503]: DEBUG nova.virt.hardware [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 885.382443] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d8759a-f6d3-4a6a-8e6f-21f7c1579559 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.391958] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d34036-6f94-460b-950c-bd993e2c03a6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.489641] env[65503]: DEBUG oslo_concurrency.lockutils [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.175s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.493381] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.505s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.493631] env[65503]: DEBUG nova.objects.instance [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lazy-loading 'resources' on Instance uuid 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 885.516911] env[65503]: INFO nova.scheduler.client.report [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Deleted allocations for instance f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d [ 885.524765] env[65503]: DEBUG nova.network.neutron [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updated VIF entry in instance network info cache for port e2945b8a-8327-4ac8-8d42-fc828663c0e0. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 885.525137] env[65503]: DEBUG nova.network.neutron [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance_info_cache with network_info: [{"id": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "address": "fa:16:3e:c9:1b:64", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2945b8a-83", "ovs_interfaceid": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 885.570533] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450169, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.702139} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.571568] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 2effe3e4-ea22-4d9f-8f5c-38ee661611e3/2effe3e4-ea22-4d9f-8f5c-38ee661611e3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 885.572017] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 885.573256] env[65503]: DEBUG nova.network.neutron [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 885.576080] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-578ef326-f8ff-4bee-ab4f-e2c864680106 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.586625] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 885.586625] env[65503]: value = "task-4450171" [ 885.586625] env[65503]: _type = "Task" [ 885.586625] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.602895] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450171, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.608649] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450170, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.625333] env[65503]: WARNING openstack [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 885.625333] env[65503]: WARNING openstack [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 885.804980] env[65503]: WARNING neutronclient.v2_0.client [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 885.805684] env[65503]: WARNING openstack [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 885.806091] env[65503]: WARNING openstack [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 885.922489] env[65503]: INFO nova.compute.manager [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Took 0.59 seconds to detach 1 volumes for instance. 
[ 885.930118] env[65503]: DEBUG nova.compute.manager [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Deleting volume: 760148f4-1011-4972-a0ed-b18b4fd3f079 {{(pid=65503) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3320}} [ 886.031023] env[65503]: DEBUG oslo_concurrency.lockutils [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Releasing lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.031023] env[65503]: DEBUG nova.compute.manager [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Received event network-changed-c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 886.031023] env[65503]: DEBUG nova.compute.manager [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Refreshing instance network info cache due to event network-changed-c56d3579-9224-4ab6-8078-adc2f2c2803b. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 886.031023] env[65503]: DEBUG oslo_concurrency.lockutils [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Acquiring lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.031023] env[65503]: DEBUG oslo_concurrency.lockutils [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Acquired lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.031023] env[65503]: DEBUG nova.network.neutron [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Refreshing network info cache for port c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 886.036057] env[65503]: DEBUG oslo_concurrency.lockutils [None req-407e7bc0-4f55-4300-817b-28fa81663e33 tempest-ServersV294TestFqdnHostnames-1006188174 tempest-ServersV294TestFqdnHostnames-1006188174-project-member] Lock "f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.334s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.043408] env[65503]: DEBUG nova.network.neutron [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Updating instance_info_cache with network_info: [{"id": "3e858b14-8f22-4201-9e89-d017fcba1f2d", "address": "fa:16:3e:60:ee:17", "network": {"id": "112b3034-6a78-4a96-a30f-26ec69def675", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2101825638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b01ea2e165f5478ebdb544d083648600", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e858b14-8f", "ovs_interfaceid": "3e858b14-8f22-4201-9e89-d017fcba1f2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 886.109251] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450171, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070197} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.115447] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 886.115663] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450170, 'name': ReconfigVM_Task, 'duration_secs': 0.778837} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.120654] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77be0078-4dc3-4783-9833-34283b1624b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.125897] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 0001f4db-3073-411c-8d60-6d8528ef263a/0001f4db-3073-411c-8d60-6d8528ef263a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 886.128185] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a74a85f6-3365-4615-b761-54490bb4323f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.158064] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 2effe3e4-ea22-4d9f-8f5c-38ee661611e3/2effe3e4-ea22-4d9f-8f5c-38ee661611e3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.163039] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2737392a-b23d-4055-8776-77f5306a8730 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.180120] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 886.180120] env[65503]: value = "task-4450173" [ 886.180120] env[65503]: _type = "Task" [ 886.180120] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.187997] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 886.187997] env[65503]: value = "task-4450174" [ 886.187997] env[65503]: _type = "Task" [ 886.187997] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.194383] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450173, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.207980] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450174, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.288555] env[65503]: DEBUG nova.compute.manager [req-6aa987d8-aab2-4093-bcd0-3cebf4d4b57c req-ff2344c1-91cc-4266-93ae-0250297e84b2 service nova] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Received event network-vif-plugged-3e858b14-8f22-4201-9e89-d017fcba1f2d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 886.288555] env[65503]: DEBUG oslo_concurrency.lockutils [req-6aa987d8-aab2-4093-bcd0-3cebf4d4b57c req-ff2344c1-91cc-4266-93ae-0250297e84b2 service nova] Acquiring lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.288892] env[65503]: DEBUG oslo_concurrency.lockutils [req-6aa987d8-aab2-4093-bcd0-3cebf4d4b57c req-ff2344c1-91cc-4266-93ae-0250297e84b2 service nova] Lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.288892] env[65503]: DEBUG oslo_concurrency.lockutils [req-6aa987d8-aab2-4093-bcd0-3cebf4d4b57c req-ff2344c1-91cc-4266-93ae-0250297e84b2 service nova] Lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.289380] env[65503]: DEBUG nova.compute.manager [req-6aa987d8-aab2-4093-bcd0-3cebf4d4b57c req-ff2344c1-91cc-4266-93ae-0250297e84b2 service nova] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] No waiting events found dispatching network-vif-plugged-3e858b14-8f22-4201-9e89-d017fcba1f2d {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 886.290495] env[65503]: WARNING nova.compute.manager [req-6aa987d8-aab2-4093-bcd0-3cebf4d4b57c req-ff2344c1-91cc-4266-93ae-0250297e84b2 service nova] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Received unexpected event network-vif-plugged-3e858b14-8f22-4201-9e89-d017fcba1f2d for instance with vm_state building and task_state spawning. [ 886.479755] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.516300] env[65503]: DEBUG nova.network.neutron [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Successfully updated port: a95339cb-0433-44f3-992a-1680008ef082 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 886.536139] env[65503]: WARNING neutronclient.v2_0.client [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 886.537398] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 886.537472] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 886.549888] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Releasing lock "refresh_cache-f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.550371] env[65503]: DEBUG nova.compute.manager [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Instance network_info: |[{"id": "3e858b14-8f22-4201-9e89-d017fcba1f2d", "address": "fa:16:3e:60:ee:17", "network": {"id": "112b3034-6a78-4a96-a30f-26ec69def675", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2101825638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b01ea2e165f5478ebdb544d083648600", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e858b14-8f", "ovs_interfaceid": "3e858b14-8f22-4201-9e89-d017fcba1f2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 886.550823] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:ee:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e858b14-8f22-4201-9e89-d017fcba1f2d', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 886.558555] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 
tempest-ServerGroupTestJSON-1399806409-project-member] Creating folder: Project (b01ea2e165f5478ebdb544d083648600). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 886.562128] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7eada119-c679-4022-b83d-eb89ced765e7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.576720] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Created folder: Project (b01ea2e165f5478ebdb544d083648600) in parent group-v870190. [ 886.576956] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Creating folder: Instances. Parent ref: group-v870388. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 886.577254] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d767ec47-7bc6-4f11-8f0d-8e8fc5a55948 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.584623] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b807f5-be2e-4674-9261-7dfd8fdc7eb7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.592118] env[65503]: DEBUG nova.compute.manager [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Received event network-vif-deleted-26c792db-2c05-4dd4-8223-013b2d5d3f9f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 886.592333] env[65503]: DEBUG nova.compute.manager [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Received event network-changed-c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 886.592492] env[65503]: DEBUG nova.compute.manager [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Refreshing instance network info cache due to event network-changed-c56d3579-9224-4ab6-8078-adc2f2c2803b. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 886.592734] env[65503]: DEBUG oslo_concurrency.lockutils [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] Acquiring lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.595338] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Created folder: Instances in parent group-v870388. 
[ 886.595578] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 886.596123] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 886.596972] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1195e06-b4ee-4058-85b3-a92b6105b26d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.617833] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9edd27-c0e5-4a55-ab66-cbaaa5e39dad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.623054] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 886.623054] env[65503]: value = "task-4450177" [ 886.623054] env[65503]: _type = "Task" [ 886.623054] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.661775] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e6c166-4169-4312-84d0-8fad7b5b9ee5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.664729] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450177, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.672068] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a23d2bb-c9b3-40dc-9e57-1ed37d6741cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.692992] env[65503]: DEBUG nova.compute.provider_tree [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.707931] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 886.707931] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 886.722858] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450173, 'name': Rename_Task, 'duration_secs': 0.243421} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.726927] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 886.727648] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450174, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.727898] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02188c43-865d-4ce6-bed8-232b44f53a01 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.736701] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 886.736701] env[65503]: value = "task-4450178" [ 886.736701] env[65503]: _type = "Task" [ 886.736701] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.749280] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450178, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.773736] env[65503]: WARNING neutronclient.v2_0.client [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 886.774427] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 886.774757] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 886.878715] env[65503]: DEBUG nova.network.neutron [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updated VIF entry in instance network info cache for port c56d3579-9224-4ab6-8078-adc2f2c2803b. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 886.879117] env[65503]: DEBUG nova.network.neutron [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updating instance_info_cache with network_info: [{"id": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "address": "fa:16:3e:05:3f:fe", "network": {"id": "ede30400-e312-446b-bf0d-2e535869001e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-613624937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cd4b8037fb204e7ebaf5f34c15096b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc56d3579-92", "ovs_interfaceid": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 887.019834] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "refresh_cache-bc0c0066-b672-4385-8d68-c14e3635af4e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.019834] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired lock "refresh_cache-bc0c0066-b672-4385-8d68-c14e3635af4e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.019834] env[65503]: DEBUG nova.network.neutron [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 887.135081] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450177, 'name': CreateVM_Task, 'duration_secs': 0.489491} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.135310] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 887.135856] env[65503]: WARNING neutronclient.v2_0.client [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 887.136253] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.136404] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.136734] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 887.137017] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89887f91-7e9e-4309-8290-d0983152b598 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.143770] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for the task: (returnval){ [ 887.143770] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fd6383-860a-919a-d4df-3f90ed255413" [ 887.143770] env[65503]: _type = "Task" [ 887.143770] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.153670] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fd6383-860a-919a-d4df-3f90ed255413, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.201076] env[65503]: DEBUG nova.scheduler.client.report [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 887.216607] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450174, 'name': ReconfigVM_Task, 'duration_secs': 0.621643} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.216948] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 2effe3e4-ea22-4d9f-8f5c-38ee661611e3/2effe3e4-ea22-4d9f-8f5c-38ee661611e3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.217891] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9af6ec1f-2463-4751-8316-a0a5d38b68ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.228355] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 887.228355] env[65503]: value = "task-4450179" [ 887.228355] env[65503]: _type = "Task" [ 887.228355] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.240978] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450179, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.253534] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450178, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.384317] env[65503]: DEBUG oslo_concurrency.lockutils [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Releasing lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.384317] env[65503]: DEBUG nova.compute.manager [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Received event network-changed-c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 887.384317] env[65503]: DEBUG nova.compute.manager [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Refreshing instance network info cache due to event network-changed-c56d3579-9224-4ab6-8078-adc2f2c2803b. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 887.384317] env[65503]: DEBUG oslo_concurrency.lockutils [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Acquiring lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.384317] env[65503]: DEBUG oslo_concurrency.lockutils [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Acquired lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.384317] env[65503]: DEBUG nova.network.neutron [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Refreshing network info cache for port c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 887.523278] env[65503]: WARNING openstack [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 887.523683] env[65503]: WARNING openstack [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 887.602868] env[65503]: DEBUG nova.network.neutron [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 887.658895] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fd6383-860a-919a-d4df-3f90ed255413, 'name': SearchDatastore_Task, 'duration_secs': 0.015253} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.659505] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.659857] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 887.660235] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.660480] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.660768] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 887.661783] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cac9a6d-efdf-4ec8-b5f7-ccee4447a694 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.675761] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 887.675761] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 887.676854] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-709849da-398d-4674-b2c7-f7bb42fe023a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.684665] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for the task: (returnval){ [ 887.684665] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e54f85-bc58-a77c-ea62-193a448f6651" [ 887.684665] env[65503]: _type = "Task" [ 887.684665] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.695752] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e54f85-bc58-a77c-ea62-193a448f6651, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.709069] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.215s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.711971] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.369s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.712291] env[65503]: DEBUG nova.objects.instance [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lazy-loading 'resources' on Instance uuid 972a50ed-759a-4312-9314-9bf01a03fc3a {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.734178] env[65503]: INFO nova.scheduler.client.report [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Deleted allocations for instance 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51 [ 887.756168] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450179, 'name': Rename_Task, 'duration_secs': 0.240362} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.759803] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 887.766078] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ce0d1d0-a543-4e44-af92-e65b1a11ba54 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.779691] env[65503]: DEBUG oslo_vmware.api [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450178, 'name': PowerOnVM_Task, 'duration_secs': 0.64292} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.779691] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 887.779691] env[65503]: DEBUG nova.compute.manager [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 887.782885] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627d74a8-a16c-4f3e-b1f2-a4578d530d8d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.792298] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 887.792298] env[65503]: value = "task-4450180" [ 887.792298] env[65503]: _type = "Task" [ 887.792298] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.817206] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450180, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.891093] env[65503]: WARNING neutronclient.v2_0.client [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 887.892196] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 887.893058] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 887.913333] env[65503]: WARNING openstack [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 887.913751] env[65503]: WARNING openstack [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 888.176987] env[65503]: WARNING neutronclient.v2_0.client [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 888.177975] env[65503]: WARNING openstack [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 888.178358] env[65503]: WARNING openstack [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 888.201037] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e54f85-bc58-a77c-ea62-193a448f6651, 'name': SearchDatastore_Task, 'duration_secs': 0.015338} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.202331] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d482b6d-88e7-4714-ac81-8dbf13583fad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.212448] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for the task: (returnval){ [ 888.212448] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528e7e8e-e366-58a5-27f2-6d3152bafc9d" [ 888.212448] env[65503]: _type = "Task" [ 888.212448] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.233700] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528e7e8e-e366-58a5-27f2-6d3152bafc9d, 'name': SearchDatastore_Task, 'duration_secs': 0.012902} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.233905] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.234363] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee/f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 888.234606] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8274d12-1246-4ae1-b5f4-3b59a974c444 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.248076] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for the task: (returnval){ [ 888.248076] env[65503]: value = "task-4450181" [ 888.248076] env[65503]: _type = "Task" [ 888.248076] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.253455] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25abce8b-e18b-4ac4-8e56-a108eef336c8 tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "9809fc8b-3842-4ce3-bb63-8ea37ee3bf51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.236s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.264728] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450181, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.274638] env[65503]: DEBUG nova.compute.manager [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Stashing vm_state: active {{(pid=65503) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 888.279996] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 888.281269] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 888.306649] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450180, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.323482] env[65503]: DEBUG oslo_concurrency.lockutils [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.422540] env[65503]: DEBUG nova.network.neutron [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Updating instance_info_cache with network_info: [{"id": "a95339cb-0433-44f3-992a-1680008ef082", "address": "fa:16:3e:73:24:bf", "network": {"id": "d4cdc216-2fcb-4281-8227-0887797358f6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-21705326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd6e7f469b7d408fae0621171c096f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95339cb-04", "ovs_interfaceid": "a95339cb-0433-44f3-992a-1680008ef082", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 888.464946] env[65503]: WARNING neutronclient.v2_0.client [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 888.466041] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 888.466285] env[65503]: WARNING openstack [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 888.607204] env[65503]: DEBUG nova.network.neutron [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updated VIF entry in instance network info cache for port c56d3579-9224-4ab6-8078-adc2f2c2803b. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 888.607580] env[65503]: DEBUG nova.network.neutron [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updating instance_info_cache with network_info: [{"id": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "address": "fa:16:3e:05:3f:fe", "network": {"id": "ede30400-e312-446b-bf0d-2e535869001e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-613624937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cd4b8037fb204e7ebaf5f34c15096b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc56d3579-92", "ovs_interfaceid": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 888.743678] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ab6712-ad53-4330-9d9a-aac011d8e442 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.758628] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e0bd30-2f91-49f3-adc7-996c42373c78 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.766125] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450181, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.813891] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc16ed6-737e-4c0e-a37e-13356fb9f236 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.828120] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8162a1-3ec2-493c-bab9-39d776284525 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.832673] env[65503]: DEBUG oslo_vmware.api [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450180, 'name': PowerOnVM_Task, 'duration_secs': 0.67392} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.833985] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.838028] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 888.838028] env[65503]: INFO nova.compute.manager [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Took 8.81 seconds to spawn the instance on the hypervisor. [ 888.838028] env[65503]: DEBUG nova.compute.manager [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 888.838028] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d4540d-9316-46ae-bc85-f4da9aeb1f03 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.848539] env[65503]: DEBUG nova.compute.provider_tree [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.928714] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Releasing lock "refresh_cache-bc0c0066-b672-4385-8d68-c14e3635af4e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.929198] env[65503]: DEBUG nova.compute.manager [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Instance network_info: |[{"id": "a95339cb-0433-44f3-992a-1680008ef082", "address": "fa:16:3e:73:24:bf", "network": {"id": "d4cdc216-2fcb-4281-8227-0887797358f6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-21705326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd6e7f469b7d408fae0621171c096f0a", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95339cb-04", "ovs_interfaceid": "a95339cb-0433-44f3-992a-1680008ef082", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 888.929805] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:24:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a95339cb-0433-44f3-992a-1680008ef082', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.940529] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 888.940882] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.941312] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec450214-9b50-4663-ac26-686fbcc50500 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.965120] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.965120] env[65503]: value = "task-4450182" [ 888.965120] env[65503]: _type = "Task" [ 888.965120] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.975153] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450182, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.984754] env[65503]: INFO nova.compute.manager [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Rebuilding instance [ 889.047057] env[65503]: DEBUG nova.compute.manager [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 889.047057] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b8a89a-a797-4b92-95ee-aea4b4db3e5a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.114824] env[65503]: DEBUG oslo_concurrency.lockutils [req-9298b762-5dde-4663-b868-4cd61406b130 req-18c997bf-7d95-4848-bc73-d58581e3275d service nova] Releasing lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.115345] env[65503]: DEBUG oslo_concurrency.lockutils [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] Acquired lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.115554] env[65503]: DEBUG nova.network.neutron [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Refreshing network info cache for port c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 889.261248] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450181, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.740935} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.261564] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee/f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 889.261775] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 889.262084] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-abc1dc58-89e3-40cf-a727-c823cd694da2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.272844] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for the task: (returnval){ [ 889.272844] env[65503]: value = "task-4450183" [ 889.272844] env[65503]: _type = "Task" [ 889.272844] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.284588] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450183, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.347116] env[65503]: DEBUG nova.compute.manager [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Received event network-changed-3e858b14-8f22-4201-9e89-d017fcba1f2d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 889.351433] env[65503]: DEBUG nova.compute.manager [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Refreshing instance network info cache due to event network-changed-3e858b14-8f22-4201-9e89-d017fcba1f2d. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 889.351433] env[65503]: DEBUG oslo_concurrency.lockutils [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Acquiring lock "refresh_cache-f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.351433] env[65503]: DEBUG oslo_concurrency.lockutils [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Acquired lock "refresh_cache-f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.351433] env[65503]: DEBUG nova.network.neutron [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Refreshing network info cache for port 3e858b14-8f22-4201-9e89-d017fcba1f2d {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 889.352308] env[65503]: DEBUG nova.scheduler.client.report [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 889.374508] env[65503]: INFO nova.compute.manager [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Took 47.18 seconds to build instance. [ 889.481680] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450182, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.497815] env[65503]: DEBUG nova.compute.manager [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Received event network-changed-c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 889.498019] env[65503]: DEBUG nova.compute.manager [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Refreshing instance network info cache due to event network-changed-c56d3579-9224-4ab6-8078-adc2f2c2803b. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 889.499935] env[65503]: DEBUG oslo_concurrency.lockutils [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] Acquiring lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.618406] env[65503]: WARNING neutronclient.v2_0.client [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 889.619203] env[65503]: WARNING openstack [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 889.619582] env[65503]: WARNING openstack [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 889.786940] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450183, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110264} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.787658] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 889.788299] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53dd3ab9-6cb5-402a-aa49-4de049b4a331 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.818656] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee/f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 889.819879] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-840d7074-9677-4c3c-9d3d-0fd7215ec5c1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.841399] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for the task: (returnval){ [ 889.841399] env[65503]: value = "task-4450184" [ 889.841399] env[65503]: _type = "Task" [ 889.841399] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.851189] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquiring lock "0001f4db-3073-411c-8d60-6d8528ef263a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.851446] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "0001f4db-3073-411c-8d60-6d8528ef263a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.852081] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquiring lock "0001f4db-3073-411c-8d60-6d8528ef263a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.852081] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "0001f4db-3073-411c-8d60-6d8528ef263a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.852081] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "0001f4db-3073-411c-8d60-6d8528ef263a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.853745] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450184, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.854418] env[65503]: INFO nova.compute.manager [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Terminating instance [ 889.856056] env[65503]: WARNING neutronclient.v2_0.client [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 889.856844] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 889.857493] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 889.867796] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.156s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.871633] env[65503]: DEBUG oslo_concurrency.lockutils [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.976s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.871633] env[65503]: DEBUG nova.objects.instance [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Lazy-loading 'resources' on Instance uuid 52701da5-2908-40f8-b1c5-bc30f17d51a0 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.879275] env[65503]: DEBUG oslo_concurrency.lockutils [None req-12ca4f3c-9ceb-4a71-a810-805ec183349f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.385s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.906015] env[65503]: INFO nova.scheduler.client.report [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Deleted allocations for instance 972a50ed-759a-4312-9314-9bf01a03fc3a [ 889.979891] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450182, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.055360] env[65503]: WARNING openstack [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 890.056065] env[65503]: WARNING openstack [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 890.071442] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.071831] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f83f01e3-7a2d-41c2-a13f-c65cf613dfb7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.083281] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 890.083281] env[65503]: value = "task-4450185" [ 890.083281] env[65503]: _type = "Task" [ 890.083281] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.097219] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450185, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.312271] env[65503]: WARNING neutronclient.v2_0.client [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 890.313127] env[65503]: WARNING openstack [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 890.313548] env[65503]: WARNING openstack [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 890.347853] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 890.348311] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 890.359462] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450184, 'name': ReconfigVM_Task, 'duration_secs': 0.460734} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.359774] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Reconfigured VM instance instance-00000046 to attach disk [datastore2] f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee/f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 890.360484] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93cc6d92-70e6-4e16-a4d1-40ee6bb2f8e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.369548] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for the task: (returnval){ [ 890.369548] env[65503]: value = "task-4450186" [ 890.369548] env[65503]: _type = "Task" [ 890.369548] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.370278] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquiring lock "refresh_cache-0001f4db-3073-411c-8d60-6d8528ef263a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.370460] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquired lock "refresh_cache-0001f4db-3073-411c-8d60-6d8528ef263a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.370636] env[65503]: DEBUG nova.network.neutron [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 890.392575] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450186, 'name': Rename_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.417308] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b6181bab-1a48-486e-96ff-de4b7fc08f20 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "972a50ed-759a-4312-9314-9bf01a03fc3a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.466s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.482293] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450182, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.598099] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450185, 'name': PowerOffVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.810200] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7f01e2-0ddb-4b41-9355-07b9da5a760f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.820133] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291b005c-2eb1-4b19-9f34-dda194768808 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.865817] env[65503]: WARNING neutronclient.v2_0.client [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 890.865817] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 890.865817] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 890.875793] env[65503]: DEBUG nova.network.neutron [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updated VIF entry in instance network info cache for port c56d3579-9224-4ab6-8078-adc2f2c2803b. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 890.876166] env[65503]: DEBUG nova.network.neutron [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updating instance_info_cache with network_info: [{"id": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "address": "fa:16:3e:05:3f:fe", "network": {"id": "ede30400-e312-446b-bf0d-2e535869001e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-613624937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cd4b8037fb204e7ebaf5f34c15096b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc56d3579-92", "ovs_interfaceid": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 890.877984] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840dc413-49a5-438e-a6eb-01a703d76dcd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.881422] env[65503]: WARNING neutronclient.v2_0.client [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 890.882078] env[65503]: WARNING openstack [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 890.882428] env[65503]: WARNING openstack [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 890.891951] env[65503]: DEBUG oslo_concurrency.lockutils [req-05b9f335-39f0-464a-96fe-fcd363b00336 req-0a5e751c-4e9a-4f11-aadd-a7cc472ea79c service nova] Releasing lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.897121] env[65503]: DEBUG oslo_concurrency.lockutils [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] Acquired lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.897286] env[65503]: DEBUG nova.network.neutron [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Refreshing network info cache for port c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 890.910707] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd97bc9-e5c4-4831-be0c-837db8504df1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.918027] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450186, 'name': Rename_Task, 'duration_secs': 0.26789} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.918027] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 890.918027] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-071c50f9-8dc2-4bd0-b340-31eeacc31bb8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.928315] env[65503]: DEBUG nova.compute.provider_tree [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.930744] env[65503]: DEBUG nova.network.neutron [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 890.935045] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for the task: (returnval){ [ 890.935045] env[65503]: value = "task-4450187" [ 890.935045] env[65503]: _type = "Task" [ 890.935045] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.956033] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450187, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.984297] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450182, 'name': CreateVM_Task, 'duration_secs': 1.54683} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.984814] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 890.985242] env[65503]: WARNING neutronclient.v2_0.client [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 890.985622] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.985764] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.986087] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 890.986361] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-365cd6e8-1576-4309-a220-2777510e1222 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.994995] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 890.994995] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521c3912-aa63-640c-2246-29195ae0b55f" [ 890.994995] env[65503]: _type = "Task" [ 890.994995] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.001452] env[65503]: DEBUG nova.network.neutron [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Updated VIF entry in instance network info cache for port 3e858b14-8f22-4201-9e89-d017fcba1f2d. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 891.001882] env[65503]: DEBUG nova.network.neutron [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Updating instance_info_cache with network_info: [{"id": "3e858b14-8f22-4201-9e89-d017fcba1f2d", "address": "fa:16:3e:60:ee:17", "network": {"id": "112b3034-6a78-4a96-a30f-26ec69def675", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2101825638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b01ea2e165f5478ebdb544d083648600", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e858b14-8f", "ovs_interfaceid": "3e858b14-8f22-4201-9e89-d017fcba1f2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 891.011378] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521c3912-aa63-640c-2246-29195ae0b55f, 'name': SearchDatastore_Task, 'duration_secs': 0.011605} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.014849] env[65503]: DEBUG nova.network.neutron [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 891.016237] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.016474] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 891.016845] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.016917] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.017091] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.018236] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d4a501c-12f2-4c9c-89a0-a242bd943abf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.029274] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.029475] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 891.030638] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76974e8c-65c6-4ffb-ba35-cd138d55d06b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.038707] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 891.038707] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52565dfc-b6b3-e922-1dce-0de7bf0b9bd8" [ 891.038707] env[65503]: _type = "Task" [ 891.038707] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.050879] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52565dfc-b6b3-e922-1dce-0de7bf0b9bd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.095702] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450185, 'name': PowerOffVM_Task, 'duration_secs': 0.52668} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.096028] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.096769] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 891.097736] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7fa9e1-80c5-4562-a702-5eaacf6caf61 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.107767] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 891.108670] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b13416b6-863f-4528-b8a2-03db0bfad779 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.140535] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 
tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 891.140774] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 891.141010] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Deleting the datastore file [datastore2] dcc876ae-075f-48d2-81a4-a1b780d6fdec {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.141370] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3d5f640-8854-4e13-a196-ef6348c74d5d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.152904] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 891.152904] env[65503]: value = "task-4450189" [ 891.152904] env[65503]: _type = "Task" [ 891.152904] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.163532] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450189, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.402281] env[65503]: WARNING neutronclient.v2_0.client [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 891.402281] env[65503]: WARNING openstack [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 891.402281] env[65503]: WARNING openstack [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 891.438191] env[65503]: DEBUG nova.scheduler.client.report [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 891.452563] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450187, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.504929] env[65503]: DEBUG oslo_concurrency.lockutils [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Releasing lock "refresh_cache-f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.505299] env[65503]: DEBUG nova.compute.manager [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Received event network-vif-plugged-a95339cb-0433-44f3-992a-1680008ef082 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 891.505454] env[65503]: DEBUG oslo_concurrency.lockutils [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Acquiring lock "bc0c0066-b672-4385-8d68-c14e3635af4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.505914] env[65503]: DEBUG oslo_concurrency.lockutils [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Lock "bc0c0066-b672-4385-8d68-c14e3635af4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.505914] env[65503]: DEBUG oslo_concurrency.lockutils [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Lock "bc0c0066-b672-4385-8d68-c14e3635af4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.506021] env[65503]: DEBUG nova.compute.manager [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] No waiting events found dispatching network-vif-plugged-a95339cb-0433-44f3-992a-1680008ef082 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 891.506131] env[65503]: WARNING nova.compute.manager [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Received unexpected event network-vif-plugged-a95339cb-0433-44f3-992a-1680008ef082 for instance with vm_state building and task_state spawning. [ 891.506310] env[65503]: DEBUG nova.compute.manager [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Received event network-changed-a95339cb-0433-44f3-992a-1680008ef082 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 891.506488] env[65503]: DEBUG nova.compute.manager [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Refreshing instance network info cache due to event network-changed-a95339cb-0433-44f3-992a-1680008ef082. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 891.506646] env[65503]: DEBUG oslo_concurrency.lockutils [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Acquiring lock "refresh_cache-bc0c0066-b672-4385-8d68-c14e3635af4e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.506783] env[65503]: DEBUG oslo_concurrency.lockutils [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Acquired lock "refresh_cache-bc0c0066-b672-4385-8d68-c14e3635af4e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.506937] env[65503]: DEBUG nova.network.neutron [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Refreshing network info cache for port a95339cb-0433-44f3-992a-1680008ef082 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 891.518752] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Releasing lock "refresh_cache-0001f4db-3073-411c-8d60-6d8528ef263a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.519336] env[65503]: DEBUG nova.compute.manager [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 891.519555] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 891.523313] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292c3b8a-ca60-47bb-9aae-ba3942906e5b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.527696] env[65503]: WARNING openstack [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 891.527823] env[65503]: WARNING openstack [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 891.537261] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock 
"e4c1c94b-744f-4bed-8e68-3b3f9de7db44" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.537552] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.541941] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.545406] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3143d5f5-4366-4fec-a4f3-eae7a091d551 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.559314] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52565dfc-b6b3-e922-1dce-0de7bf0b9bd8, 'name': SearchDatastore_Task, 'duration_secs': 0.012205} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.562094] env[65503]: DEBUG oslo_vmware.api [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 891.562094] env[65503]: value = "task-4450190" [ 891.562094] env[65503]: _type = "Task" [ 891.562094] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.566946] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78c25372-1b8b-43b5-869f-ef4309c15aef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.579049] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 891.579049] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ebb73-7bb5-a65b-2def-362bb3f187d3" [ 891.579049] env[65503]: _type = "Task" [ 891.579049] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.582438] env[65503]: DEBUG oslo_vmware.api [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450190, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.593309] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ebb73-7bb5-a65b-2def-362bb3f187d3, 'name': SearchDatastore_Task, 'duration_secs': 0.011769} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.594669] env[65503]: WARNING neutronclient.v2_0.client [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 891.595346] env[65503]: WARNING openstack [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 891.595706] env[65503]: WARNING openstack [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 891.604388] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.604749] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] bc0c0066-b672-4385-8d68-c14e3635af4e/bc0c0066-b672-4385-8d68-c14e3635af4e.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 891.606568] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbc7230c-234d-4406-9c54-8b350eaf8c6c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.616789] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 891.616789] env[65503]: value = "task-4450191" [ 891.616789] env[65503]: _type = "Task" [ 891.616789] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.630504] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450191, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.667233] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450189, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.121151} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.667656] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 891.667961] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 891.668228] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 891.722177] env[65503]: DEBUG nova.network.neutron [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updated VIF entry in instance network info cache for port c56d3579-9224-4ab6-8078-adc2f2c2803b. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 891.722782] env[65503]: DEBUG nova.network.neutron [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updating instance_info_cache with network_info: [{"id": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "address": "fa:16:3e:05:3f:fe", "network": {"id": "ede30400-e312-446b-bf0d-2e535869001e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-613624937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cd4b8037fb204e7ebaf5f34c15096b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc56d3579-92", "ovs_interfaceid": "c56d3579-9224-4ab6-8078-adc2f2c2803b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 891.948120] env[65503]: DEBUG oslo_concurrency.lockutils [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.078s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.954433] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.726s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.954433] env[65503]: DEBUG nova.objects.instance [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lazy-loading 'resources' on Instance uuid 9dbaff4f-ab02-481b-b51f-b134021d277c {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 891.963814] env[65503]: DEBUG oslo_vmware.api [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450187, 'name': PowerOnVM_Task, 'duration_secs': 0.811459} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.964939] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 891.964939] env[65503]: INFO nova.compute.manager [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Took 8.30 seconds to spawn the instance on the hypervisor. [ 891.964939] env[65503]: DEBUG nova.compute.manager [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 891.965760] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60239ba8-84de-4588-865f-558cd0340521 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.994252] env[65503]: INFO nova.scheduler.client.report [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Deleted allocations for instance 52701da5-2908-40f8-b1c5-bc30f17d51a0 [ 892.011111] env[65503]: WARNING neutronclient.v2_0.client [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 892.012016] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 892.012414] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 892.043929] env[65503]: DEBUG nova.compute.utils [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 892.053255] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "39266117-e82e-48ae-932a-be04b1a7351a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.053795] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "39266117-e82e-48ae-932a-be04b1a7351a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.054484] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "39266117-e82e-48ae-932a-be04b1a7351a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.054745] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "39266117-e82e-48ae-932a-be04b1a7351a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.056610] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "39266117-e82e-48ae-932a-be04b1a7351a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.064542] env[65503]: INFO nova.compute.manager [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Terminating instance [ 892.086153] env[65503]: DEBUG oslo_vmware.api [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450190, 'name': PowerOffVM_Task, 'duration_secs': 0.243586} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.086590] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 892.086808] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 892.087172] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52697da6-fee3-4e0e-a7c4-85b6ef3552c7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.108782] env[65503]: DEBUG nova.compute.manager [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Received event network-changed-e2945b8a-8327-4ac8-8d42-fc828663c0e0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 892.108782] env[65503]: DEBUG nova.compute.manager [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Refreshing instance network info cache due to event network-changed-e2945b8a-8327-4ac8-8d42-fc828663c0e0. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 892.108964] env[65503]: DEBUG oslo_concurrency.lockutils [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] Acquiring lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.109197] env[65503]: DEBUG oslo_concurrency.lockutils [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] Acquired lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.109544] env[65503]: DEBUG nova.network.neutron [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Refreshing network info cache for port e2945b8a-8327-4ac8-8d42-fc828663c0e0 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 892.124804] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 892.125155] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 892.125369] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Deleting the datastore file [datastore2] 0001f4db-3073-411c-8d60-6d8528ef263a {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 892.125690] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-edeaada8-f747-4c0b-a597-88a0edd95389 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.132058] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450191, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.142700] env[65503]: DEBUG oslo_vmware.api [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for the task: (returnval){ [ 892.142700] env[65503]: value = "task-4450193" [ 892.142700] env[65503]: _type = "Task" [ 892.142700] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.153984] env[65503]: DEBUG oslo_vmware.api [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450193, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.166772] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 892.167285] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 892.227716] env[65503]: DEBUG oslo_concurrency.lockutils [req-b3004c88-3227-4094-a385-aae1c0d6c871 req-063500ad-ba53-4da3-a173-3455e0d7f7dd service nova] Releasing lock "refresh_cache-429b7542-c288-4a7a-9032-09881938b256" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.270700] env[65503]: DEBUG oslo_concurrency.lockutils [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquiring lock "429b7542-c288-4a7a-9032-09881938b256" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.270700] env[65503]: DEBUG oslo_concurrency.lockutils [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Lock "429b7542-c288-4a7a-9032-09881938b256" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.270700] env[65503]: DEBUG oslo_concurrency.lockutils [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquiring lock "429b7542-c288-4a7a-9032-09881938b256-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.271020] env[65503]: DEBUG oslo_concurrency.lockutils [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Lock "429b7542-c288-4a7a-9032-09881938b256-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.271020] 
env[65503]: DEBUG oslo_concurrency.lockutils [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Lock "429b7542-c288-4a7a-9032-09881938b256-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.275109] env[65503]: INFO nova.compute.manager [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Terminating instance [ 892.280331] env[65503]: WARNING neutronclient.v2_0.client [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 892.281137] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 892.281455] env[65503]: WARNING openstack [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 892.360362] env[65503]: DEBUG oslo_concurrency.lockutils [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.360623] env[65503]: DEBUG oslo_concurrency.lockutils [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.360855] env[65503]: DEBUG oslo_concurrency.lockutils [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.361058] env[65503]: DEBUG oslo_concurrency.lockutils [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.361241] env[65503]: DEBUG oslo_concurrency.lockutils [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.363547] env[65503]: INFO nova.compute.manager [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Terminating instance [ 892.384154] env[65503]: DEBUG nova.network.neutron [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Updated VIF entry in instance network info cache for port a95339cb-0433-44f3-992a-1680008ef082. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 892.384154] env[65503]: DEBUG nova.network.neutron [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Updating instance_info_cache with network_info: [{"id": "a95339cb-0433-44f3-992a-1680008ef082", "address": "fa:16:3e:73:24:bf", "network": {"id": "d4cdc216-2fcb-4281-8227-0887797358f6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-21705326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd6e7f469b7d408fae0621171c096f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95339cb-04", "ovs_interfaceid": "a95339cb-0433-44f3-992a-1680008ef082", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 892.485758] env[65503]: INFO nova.compute.manager [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Took 49.71 seconds to build instance. 
[ 892.504142] env[65503]: DEBUG oslo_concurrency.lockutils [None req-809994db-4094-4210-8797-4d35ebf2fd54 tempest-ServerAddressesNegativeTestJSON-703093128 tempest-ServerAddressesNegativeTestJSON-703093128-project-member] Lock "52701da5-2908-40f8-b1c5-bc30f17d51a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.044s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.547548] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.570202] env[65503]: DEBUG nova.compute.manager [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 892.570445] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.571347] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa6c8f2-4455-4050-a4fd-24b64883a677 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.580671] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.580939] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e489b98-bb20-41cb-b584-ff032f216f7b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.587847] env[65503]: DEBUG oslo_vmware.api [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 892.587847] env[65503]: value = "task-4450194" [ 892.587847] env[65503]: _type = "Task" [ 892.587847] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.602885] env[65503]: DEBUG oslo_vmware.api [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450194, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.614779] env[65503]: WARNING neutronclient.v2_0.client [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 892.615703] env[65503]: WARNING openstack [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 892.616066] env[65503]: WARNING openstack [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 892.636604] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450191, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522899} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.639598] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] bc0c0066-b672-4385-8d68-c14e3635af4e/bc0c0066-b672-4385-8d68-c14e3635af4e.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 892.639833] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 892.640335] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d35c496-9390-4ab7-8ad1-7e4d57530192 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.649543] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 892.649543] env[65503]: value = "task-4450195" [ 892.649543] env[65503]: _type = "Task" [ 892.649543] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.660371] env[65503]: DEBUG oslo_vmware.api [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Task: {'id': task-4450193, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133504} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.663883] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.664137] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 892.664324] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 892.664501] env[65503]: INFO nova.compute.manager [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 892.664750] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 892.665936] env[65503]: DEBUG nova.compute.manager [-] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 892.665936] env[65503]: DEBUG nova.network.neutron [-] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 892.665936] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
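Nearly every vCenter operation in this log (PowerOffVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateVM_Task) follows the same shape: invoke the SOAP method, receive a task reference, then poll it until it reports success, which is what produces the recurring "Waiting for the task ... progress is N% ... completed successfully" lines. The snippet below is a simplified, self-contained stand-in for that poll loop, not oslo.vmware's actual wait_for_task implementation; FakeTask and its timings are invented so the example runs on its own.

# Simplified stand-in for the task-polling pattern seen throughout the log.
import time

class FakeTask:
    """Dummy task object that finishes after a few polls."""
    def __init__(self, name):
        self.name = name
        self._polls = 0

    def poll(self):
        self._polls += 1
        if self._polls < 3:
            return {"state": "running", "progress": self._polls * 45}
        return {"state": "success", "progress": 100}

def wait_for_task(task, poll_interval=0.5):
    """Poll until the task succeeds, mirroring the 'progress is N%' log lines."""
    while True:
        info = task.poll()
        if info["state"] == "success":
            print(f"Task {task.name} completed successfully.")
            return info
        print(f"Task {task.name} progress is {info['progress']}%.")
        time.sleep(poll_interval)

if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOffVM_Task"))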
[ 892.666136] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 892.666831] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 892.680859] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450195, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.700786] env[65503]: DEBUG nova.network.neutron [-] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 892.701197] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 892.725607] env[65503]: DEBUG nova.virt.hardware [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 892.725906] env[65503]: DEBUG nova.virt.hardware [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 892.726070] env[65503]: DEBUG nova.virt.hardware [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 892.726276] env[65503]: DEBUG nova.virt.hardware [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 892.726423] env[65503]: DEBUG nova.virt.hardware [None 
req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 892.726560] env[65503]: DEBUG nova.virt.hardware [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 892.726827] env[65503]: DEBUG nova.virt.hardware [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 892.726973] env[65503]: DEBUG nova.virt.hardware [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 892.727178] env[65503]: DEBUG nova.virt.hardware [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 892.727340] env[65503]: DEBUG nova.virt.hardware [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 892.727530] env[65503]: DEBUG nova.virt.hardware [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 892.729090] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf8f9f9-799b-4b33-a667-779926d5f449 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.744667] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76050e33-755c-4065-ab4c-b2f0f94026ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.764410] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.770853] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 892.772719] env[65503]: WARNING openstack [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 892.773545] env[65503]: WARNING openstack [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 892.784754] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.785451] env[65503]: DEBUG nova.compute.manager [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 892.785782] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.786649] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db033597-ca7f-49b2-bb51-53f14d9d5d73 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.803407] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76834cbb-77a4-4548-aebc-0fded612fe04 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.823293] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.823293] env[65503]: value = "task-4450196" [ 892.823293] env[65503]: _type = "Task" [ 892.823293] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.823617] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.823952] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e090bb4-039f-4159-9510-c283ab69e4ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.836457] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450196, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.841483] env[65503]: DEBUG oslo_vmware.api [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 892.841483] env[65503]: value = "task-4450197" [ 892.841483] env[65503]: _type = "Task" [ 892.841483] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.851458] env[65503]: DEBUG oslo_vmware.api [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450197, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.858238] env[65503]: WARNING neutronclient.v2_0.client [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 892.858941] env[65503]: WARNING openstack [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 892.859399] env[65503]: WARNING openstack [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 892.868347] env[65503]: DEBUG nova.compute.manager [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 892.868587] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.872391] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7488c3f-795e-4e3f-99cc-82fd240f5744 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.881564] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.881870] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-556b0be3-2e4e-468d-97d3-2eb5b63233a0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.887348] env[65503]: DEBUG oslo_concurrency.lockutils [req-78cee3ab-a720-4f09-8b43-a381a8fd78d6 req-725fd7ce-93d4-443d-bf4d-51a685212a81 service nova] Releasing lock "refresh_cache-bc0c0066-b672-4385-8d68-c14e3635af4e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.891599] env[65503]: DEBUG oslo_vmware.api [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 892.891599] env[65503]: value = "task-4450198" [ 892.891599] env[65503]: _type = "Task" [ 892.891599] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.904335] env[65503]: DEBUG oslo_vmware.api [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450198, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.944463] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c8895d-c2a6-45a4-956c-cad950f56f14 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.953714] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8044965-852b-4f3b-a445-4d3478d21a67 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.960155] env[65503]: DEBUG nova.network.neutron [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updated VIF entry in instance network info cache for port e2945b8a-8327-4ac8-8d42-fc828663c0e0. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 892.960556] env[65503]: DEBUG nova.network.neutron [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance_info_cache with network_info: [{"id": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "address": "fa:16:3e:c9:1b:64", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2945b8a-83", "ovs_interfaceid": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 892.989600] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ffec2cb4-736c-4591-8321-31da074e871e tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.408s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.991464] env[65503]: DEBUG oslo_concurrency.lockutils [req-510acb40-a3b4-46a7-968e-41fc20c50b43 req-562a0eea-6c2b-4f85-a8ee-bd2477158cc0 service nova] Releasing lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.992464] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d2a165-7605-4ae1-8994-4043a2bbf3fb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.002682] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723dd8a4-8161-407d-aa7a-3fef1490e954 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.019829] env[65503]: DEBUG nova.compute.provider_tree [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.103431] env[65503]: DEBUG oslo_vmware.api [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 
tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450194, 'name': PowerOffVM_Task, 'duration_secs': 0.2747} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.103683] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.103868] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.104326] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d3f8db3-2a19-4495-80cb-fd9411ad3fbb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.160786] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101183} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.161065] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 893.161916] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3856c998-fa4c-464f-a68b-21e6586c1df7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.198040] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] bc0c0066-b672-4385-8d68-c14e3635af4e/bc0c0066-b672-4385-8d68-c14e3635af4e.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 893.199215] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4eaa63b8-39a4-47bd-99aa-7e69b41075e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.218096] env[65503]: DEBUG nova.network.neutron [-] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 893.226663] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 
tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 893.226663] env[65503]: value = "task-4450200" [ 893.226663] env[65503]: _type = "Task" [ 893.226663] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.227782] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.228326] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.228326] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Deleting the datastore file [datastore2] 39266117-e82e-48ae-932a-be04b1a7351a {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.231974] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e153df7-6c2e-4538-a61b-cbb5000ab4d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.240809] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450200, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.242332] env[65503]: DEBUG oslo_vmware.api [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for the task: (returnval){ [ 893.242332] env[65503]: value = "task-4450201" [ 893.242332] env[65503]: _type = "Task" [ 893.242332] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.252960] env[65503]: DEBUG oslo_vmware.api [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450201, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.336703] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450196, 'name': CreateVM_Task, 'duration_secs': 0.426523} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.336901] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 893.337353] env[65503]: DEBUG oslo_concurrency.lockutils [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.337511] env[65503]: DEBUG oslo_concurrency.lockutils [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.337848] env[65503]: DEBUG oslo_concurrency.lockutils [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 893.338303] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a8f2c2d-d73c-44db-bb97-c8d5f2503f75 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.348366] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 893.348366] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528f3b79-0e25-f174-1e23-6e82407a1c15" [ 893.348366] env[65503]: _type = "Task" [ 893.348366] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.356107] env[65503]: DEBUG oslo_vmware.api [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450197, 'name': PowerOffVM_Task, 'duration_secs': 0.374912} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.356492] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.356779] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.356942] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73311cf8-38fd-4791-a9f1-462273995702 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.362452] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528f3b79-0e25-f174-1e23-6e82407a1c15, 'name': SearchDatastore_Task, 'duration_secs': 0.011069} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.362769] env[65503]: DEBUG oslo_concurrency.lockutils [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.362998] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.363353] env[65503]: DEBUG oslo_concurrency.lockutils [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.363431] env[65503]: DEBUG oslo_concurrency.lockutils [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.363640] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.363954] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6fdc51e9-acd0-4049-a5f7-e0f1fd1f6eb2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.374082] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.374314] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.375138] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c91d35c2-a597-43c3-94b5-c57abb41c50c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.382156] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 893.382156] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529e0ad5-773c-7f84-d7d4-f5f82f729ed5" [ 893.382156] env[65503]: _type = "Task" [ 893.382156] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.392545] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529e0ad5-773c-7f84-d7d4-f5f82f729ed5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.401348] env[65503]: DEBUG oslo_vmware.api [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450198, 'name': PowerOffVM_Task, 'duration_secs': 0.322312} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.401714] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.401877] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.402177] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca16591a-a70b-477a-84bb-1a1044280f86 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.435897] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.436154] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.436350] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Deleting the datastore file [datastore1] 429b7542-c288-4a7a-9032-09881938b256 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.436683] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f2a59f5-5580-4c50-bd35-2bc182200169 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.446538] env[65503]: DEBUG oslo_vmware.api [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for the task: (returnval){ [ 893.446538] env[65503]: value = "task-4450204" [ 893.446538] env[65503]: _type = "Task" [ 893.446538] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.456176] env[65503]: DEBUG oslo_vmware.api [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450204, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.489043] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.489043] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.489043] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Deleting the datastore file [datastore2] bcd845e2-5a89-4eef-bb76-33d69834bbc1 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.489437] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f9ccae5-98fa-4170-ad1d-e74a624dbe21 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.499361] env[65503]: DEBUG oslo_vmware.api [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 893.499361] env[65503]: value = "task-4450205" [ 893.499361] env[65503]: _type = "Task" [ 893.499361] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.511857] env[65503]: DEBUG oslo_vmware.api [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450205, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.523188] env[65503]: DEBUG nova.scheduler.client.report [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 893.606414] env[65503]: DEBUG oslo_vmware.rw_handles [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d7c7b6-69b3-95f5-0d72-c3af7b1115eb/disk-0.vmdk. 
{{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 893.607531] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f3f31f-d68e-429f-9765-519efacda06b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.614099] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.614332] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.614750] env[65503]: INFO nova.compute.manager [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Attaching volume adb6e1a6-ba84-48df-9bdb-d884f47fcd90 to /dev/sdb [ 893.618225] env[65503]: DEBUG oslo_vmware.rw_handles [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d7c7b6-69b3-95f5-0d72-c3af7b1115eb/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 893.618365] env[65503]: ERROR oslo_vmware.rw_handles [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d7c7b6-69b3-95f5-0d72-c3af7b1115eb/disk-0.vmdk due to incomplete transfer. [ 893.618679] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-de979585-ad58-46f3-b086-0b8e2c0b2ce5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.630201] env[65503]: DEBUG oslo_vmware.rw_handles [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d7c7b6-69b3-95f5-0d72-c3af7b1115eb/disk-0.vmdk. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 893.631759] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Uploaded image 9eb4755c-7e54-48f4-839b-82094d800434 to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 893.634476] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 893.634830] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a19c6aa1-2089-46ee-bcd6-97b473b85fee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.643376] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 893.643376] env[65503]: value = "task-4450206" [ 893.643376] env[65503]: _type = "Task" [ 893.643376] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.655550] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450206, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.668059] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4fcb0d-8bb7-4330-9d92-93c14419c142 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.676325] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a97ada32-fd64-4c17-a110-29d1fd0a23e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.692208] env[65503]: DEBUG nova.virt.block_device [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Updating existing volume attachment record: 6fe8d5a7-af0f-4001-8d0d-f3fe4cff9ebe {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 893.722533] env[65503]: INFO nova.compute.manager [-] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Took 1.06 seconds to deallocate network for instance. [ 893.737611] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450200, 'name': ReconfigVM_Task, 'duration_secs': 0.351472} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.737852] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Reconfigured VM instance instance-00000047 to attach disk [datastore1] bc0c0066-b672-4385-8d68-c14e3635af4e/bc0c0066-b672-4385-8d68-c14e3635af4e.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 893.738772] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-663051e5-920b-4873-a925-4c651620b7df {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.748837] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 893.748837] env[65503]: value = "task-4450207" [ 893.748837] env[65503]: _type = "Task" [ 893.748837] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.763050] env[65503]: DEBUG oslo_vmware.api [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Task: {'id': task-4450201, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178974} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.769588] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.769588] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 893.769799] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 893.770091] env[65503]: INFO nova.compute.manager [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Took 1.20 seconds to destroy the instance on the hypervisor. [ 893.770454] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 893.770783] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450207, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.771132] env[65503]: DEBUG nova.compute.manager [-] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 893.771292] env[65503]: DEBUG nova.network.neutron [-] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 893.771673] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 893.772598] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 893.772967] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 893.876335] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 893.898582] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529e0ad5-773c-7f84-d7d4-f5f82f729ed5, 'name': SearchDatastore_Task, 'duration_secs': 0.010925} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.902401] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ac08472-7958-4613-b2dd-6c89454ba29b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.910437] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 893.910437] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bdc8d3-2f3c-61af-5e1e-18f42236ebbd" [ 893.910437] env[65503]: _type = "Task" [ 893.910437] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.926261] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bdc8d3-2f3c-61af-5e1e-18f42236ebbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.959352] env[65503]: DEBUG oslo_vmware.api [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Task: {'id': task-4450204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.450615} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.959636] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.959841] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 893.959992] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 893.960182] env[65503]: INFO nova.compute.manager [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] [instance: 429b7542-c288-4a7a-9032-09881938b256] Took 1.17 seconds to destroy the instance on the hypervisor. [ 893.960433] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 893.960622] env[65503]: DEBUG nova.compute.manager [-] [instance: 429b7542-c288-4a7a-9032-09881938b256] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 893.960713] env[65503]: DEBUG nova.network.neutron [-] [instance: 429b7542-c288-4a7a-9032-09881938b256] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 893.961151] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 893.961683] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 893.962616] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 894.013709] env[65503]: DEBUG oslo_vmware.api [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450205, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.387445} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.014309] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 894.014384] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 894.014545] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 894.014723] env[65503]: INFO nova.compute.manager [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Took 1.15 seconds to destroy the instance on the hypervisor. [ 894.016830] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 894.016830] env[65503]: DEBUG nova.compute.manager [-] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 894.016830] env[65503]: DEBUG nova.network.neutron [-] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 894.016830] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 894.016830] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 894.016830] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 894.028975] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.075s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.032075] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.246s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.034512] env[65503]: INFO nova.compute.claims [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.100937] env[65503]: INFO nova.scheduler.client.report [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleted allocations for instance 9dbaff4f-ab02-481b-b51f-b134021d277c [ 894.158960] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450206, 'name': Destroy_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.181442] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 894.228758] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.274712] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450207, 'name': Rename_Task, 'duration_secs': 0.360488} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.278761] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 894.278761] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae981bdc-8823-4d50-b44e-667e502e611f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.289021] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 894.289021] env[65503]: value = "task-4450209" [ 894.289021] env[65503]: _type = "Task" [ 894.289021] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.306664] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450209, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.386225] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 894.422287] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bdc8d3-2f3c-61af-5e1e-18f42236ebbd, 'name': SearchDatastore_Task, 'duration_secs': 0.024989} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.422584] env[65503]: DEBUG oslo_concurrency.lockutils [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.422890] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] dcc876ae-075f-48d2-81a4-a1b780d6fdec/dcc876ae-075f-48d2-81a4-a1b780d6fdec.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 894.423319] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af43b1b6-535e-4544-b77c-ac5103b402dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.434032] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 894.434032] env[65503]: value = "task-4450212" [ 894.434032] env[65503]: _type = "Task" [ 894.434032] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.441923] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450212, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.615286] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc49307f-9f70-4645-90f1-a37ad1c96fc1 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "9dbaff4f-ab02-481b-b51f-b134021d277c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.859s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.619412] env[65503]: DEBUG oslo_concurrency.lockutils [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquiring lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.619666] env[65503]: DEBUG oslo_concurrency.lockutils [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.620112] env[65503]: DEBUG oslo_concurrency.lockutils [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquiring lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.620112] env[65503]: DEBUG oslo_concurrency.lockutils [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.620300] env[65503]: DEBUG oslo_concurrency.lockutils [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.623037] env[65503]: INFO nova.compute.manager [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Terminating instance [ 894.641920] env[65503]: DEBUG nova.network.neutron [-] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 894.662618] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: 
{'id': task-4450206, 'name': Destroy_Task, 'duration_secs': 0.691436} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.663128] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Destroyed the VM [ 894.663365] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 894.664133] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c8cdbc5b-9a5c-4699-bcd5-fa0c3a3d5220 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.677272] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 894.677272] env[65503]: value = "task-4450213" [ 894.677272] env[65503]: _type = "Task" [ 894.677272] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.687895] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450213, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.804033] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450209, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.951372] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450212, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.129846] env[65503]: DEBUG nova.compute.manager [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 895.129846] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 895.130721] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb947629-1457-499a-a07c-76eb9a44cbd8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.140634] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 895.141042] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4618ebb4-9850-40d0-aaf9-f3603c60e6e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.144420] env[65503]: INFO nova.compute.manager [-] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Took 1.37 seconds to deallocate network for instance. [ 895.152135] env[65503]: DEBUG oslo_vmware.api [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for the task: (returnval){ [ 895.152135] env[65503]: value = "task-4450214" [ 895.152135] env[65503]: _type = "Task" [ 895.152135] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.163754] env[65503]: DEBUG oslo_vmware.api [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450214, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.195680] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450213, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.226034] env[65503]: DEBUG nova.network.neutron [-] [instance: 429b7542-c288-4a7a-9032-09881938b256] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 895.305158] env[65503]: DEBUG oslo_vmware.api [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450209, 'name': PowerOnVM_Task, 'duration_secs': 0.832821} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.305471] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 895.305673] env[65503]: INFO nova.compute.manager [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Took 9.95 seconds to spawn the instance on the hypervisor. [ 895.305848] env[65503]: DEBUG nova.compute.manager [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 895.306771] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2351669e-d22f-4583-b921-58238fefca79 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.432624] env[65503]: DEBUG nova.compute.manager [req-50aa8700-6b60-4f4b-ab2c-2620df903eb6 req-23cf4b96-0eb5-49f8-856c-57a4d883b443 service nova] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Received event network-vif-deleted-03697e78-5935-45aa-a1d5-1bf8701e3f56 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 895.438907] env[65503]: DEBUG nova.network.neutron [-] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 895.447235] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450212, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5499} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.447510] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] dcc876ae-075f-48d2-81a4-a1b780d6fdec/dcc876ae-075f-48d2-81a4-a1b780d6fdec.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 895.447719] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 895.448034] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16deaaa4-aba4-400a-a324-de4b10c95059 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.460029] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 895.460029] env[65503]: value = "task-4450215" [ 895.460029] env[65503]: _type = "Task" [ 895.460029] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.472742] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450215, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.505755] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a21905c-3f67-4640-a6b9-94f255df2600 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.516363] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f06cc5-827c-4e5e-90b6-ac0269206e1c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.550782] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f75f43-c2c0-43bf-917b-2d936a215237 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.560288] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3d0cc1-e9a1-4ca0-a8d1-e3969ddca712 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.576829] env[65503]: DEBUG nova.compute.provider_tree [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.656145] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.666201] env[65503]: DEBUG oslo_vmware.api [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450214, 'name': PowerOffVM_Task, 'duration_secs': 0.229527} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.666475] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.666637] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 895.666891] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26824b21-b70b-46cc-b5e4-4f0196dfc4c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.690971] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450213, 'name': RemoveSnapshot_Task, 'duration_secs': 0.618117} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.690971] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 895.690971] env[65503]: DEBUG nova.compute.manager [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 895.691305] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c866e940-76c2-446a-b391-2a941c787493 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.730042] env[65503]: INFO nova.compute.manager [-] [instance: 429b7542-c288-4a7a-9032-09881938b256] Took 1.77 seconds to deallocate network for instance. 
[ 895.739780] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.739780] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.739780] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Deleting the datastore file [datastore2] f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.739780] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c341cb97-c85c-4ab2-a656-8d361d319ef7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.746988] env[65503]: DEBUG oslo_vmware.api [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for the task: (returnval){ [ 895.746988] env[65503]: value = "task-4450217" [ 895.746988] env[65503]: _type = "Task" [ 895.746988] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.759862] env[65503]: DEBUG oslo_vmware.api [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450217, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.833154] env[65503]: INFO nova.compute.manager [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Took 43.21 seconds to build instance. [ 895.840161] env[65503]: DEBUG nova.compute.manager [req-2304ea12-dedc-4759-b7fd-8faae3fef6bc req-9e46ffbc-ad34-4299-979c-6ddfa952ad62 service nova] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Received event network-vif-deleted-2a4e1930-c256-421a-8d7b-a1ec9324152d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 895.941963] env[65503]: INFO nova.compute.manager [-] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Took 1.93 seconds to deallocate network for instance. [ 895.975877] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072666} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.975877] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 895.975877] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893a00b9-9c1c-4b0b-befb-61720b7d4097 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.002723] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] dcc876ae-075f-48d2-81a4-a1b780d6fdec/dcc876ae-075f-48d2-81a4-a1b780d6fdec.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 896.003952] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef4014ac-5f58-4689-a94e-c225a374b383 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.032854] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 896.032854] env[65503]: value = "task-4450218" [ 896.032854] env[65503]: _type = "Task" [ 896.032854] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.044847] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450218, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.080358] env[65503]: DEBUG nova.scheduler.client.report [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 896.206628] env[65503]: INFO nova.compute.manager [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Shelve offloading [ 896.243702] env[65503]: DEBUG oslo_concurrency.lockutils [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.271093] env[65503]: DEBUG oslo_vmware.api [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Task: {'id': task-4450217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.432454} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.271093] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.271093] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 896.271093] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.271093] env[65503]: INFO nova.compute.manager [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 896.271093] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 896.271093] env[65503]: DEBUG nova.compute.manager [-] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 896.271093] env[65503]: DEBUG nova.network.neutron [-] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 896.271093] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 896.271093] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 896.271093] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 896.337554] env[65503]: DEBUG oslo_concurrency.lockutils [None req-15669d86-800c-4993-b01d-f119ea7f9e80 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "bc0c0066-b672-4385-8d68-c14e3635af4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.536s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.451960] env[65503]: DEBUG oslo_concurrency.lockutils [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.479716] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
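Editor's note: the loopingcall entry above ("Waiting for function ..._deallocate_network_with_retries to return") shows network teardown wrapped in a retry helper so a transient Neutron failure does not leak ports. A simplified stand-in for that retry wrapper, written as plain Python rather than oslo.service's looping-call machinery; the retry count and sleep times are illustrative only:

    import time

    def deallocate_with_retries(deallocate, context, instance,
                                max_retries=3, base_sleep=1.0):
        """Call deallocate(context, instance), retrying on failure with a
        growing back-off -- a rough analogue of the retry loop logged above."""
        for attempt in range(1, max_retries + 1):
            try:
                return deallocate(context, instance)
            except Exception:
                if attempt == max_retries:
                    raise
                time.sleep(base_sleep * attempt)

The real helper lives in nova.compute.manager and runs under oslo_service's looping-call support; this sketch only mirrors the shape of the loop.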
[ 896.526582] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "1e92795e-cf30-4175-9e31-c29278f3e9e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.526963] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "1e92795e-cf30-4175-9e31-c29278f3e9e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.527110] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "1e92795e-cf30-4175-9e31-c29278f3e9e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.527370] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "1e92795e-cf30-4175-9e31-c29278f3e9e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.527567] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "1e92795e-cf30-4175-9e31-c29278f3e9e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.534855] env[65503]: INFO nova.compute.manager [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Terminating instance [ 896.551841] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450218, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.587109] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.555s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.587769] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 896.593473] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.705s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.594523] env[65503]: INFO nova.compute.claims [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.710506] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 896.710871] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d4463e5-b015-4e90-99c9-93db7bc74134 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.720188] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 896.720188] env[65503]: value = "task-4450220" [ 896.720188] env[65503]: _type = "Task" [ 896.720188] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.730450] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 896.730681] env[65503]: DEBUG nova.compute.manager [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 896.731550] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc90a0c-1b78-4178-bb65-873258711c83 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.740248] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.740501] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.740942] env[65503]: DEBUG nova.network.neutron [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 897.047527] env[65503]: DEBUG nova.compute.manager [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 897.050017] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 897.050017] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450218, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.050017] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276fb8ea-6e6d-487a-b639-f1e25c86d735 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.061050] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 897.061350] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74958c15-366d-4151-be96-aae765cae61e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.069651] env[65503]: DEBUG oslo_vmware.api [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 897.069651] env[65503]: value = "task-4450221" [ 897.069651] env[65503]: _type = "Task" [ 897.069651] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.078800] env[65503]: DEBUG oslo_vmware.api [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450221, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.101720] env[65503]: DEBUG nova.compute.utils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 897.107289] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 897.107289] env[65503]: DEBUG nova.network.neutron [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 897.107289] env[65503]: WARNING neutronclient.v2_0.client [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 897.107289] env[65503]: WARNING neutronclient.v2_0.client [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
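Editor's note: "Allocating IP information in the background" above reflects the build path kicking off Neutron port allocation asynchronously so block-device mapping and other spawn preparation can proceed in parallel, with the network result picked up later when spawning actually needs it. A minimal illustration of that overlap using a thread pool; allocate_ports, build_block_devices and spawn are hypothetical stand-ins, not Nova's real internals:

    from concurrent.futures import ThreadPoolExecutor

    def build_instance(allocate_ports, build_block_devices, spawn, instance):
        """Overlap network allocation with other build steps, then join."""
        with ThreadPoolExecutor(max_workers=1) as pool:
            # Start port allocation in the background.
            nw_future = pool.submit(allocate_ports, instance)
            # Meanwhile, prepare block device mappings and other resources.
            bdms = build_block_devices(instance)
            # Block only when the network info is actually needed.
            network_info = nw_future.result()
        return spawn(instance, network_info, bdms)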
[ 897.107289] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 897.107289] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 897.167343] env[65503]: DEBUG nova.policy [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffa6631776ba4f168d3d2a0168b858e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81064933e6fe4abf9c18f13d18c58037', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 897.244948] env[65503]: WARNING neutronclient.v2_0.client [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 897.246609] env[65503]: WARNING openstack [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 897.247230] env[65503]: WARNING openstack [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 897.292900] env[65503]: DEBUG nova.network.neutron [-] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 897.554340] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450218, 'name': ReconfigVM_Task, 'duration_secs': 1.350551} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.554813] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Reconfigured VM instance instance-00000044 to attach disk [datastore2] dcc876ae-075f-48d2-81a4-a1b780d6fdec/dcc876ae-075f-48d2-81a4-a1b780d6fdec.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 897.556397] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23089951-01ee-4ccb-86ae-13c11a3829e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.562638] env[65503]: DEBUG nova.network.neutron [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Successfully created port: ba7e7de1-3fbb-49ea-889a-72f446d61a64 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 897.567810] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 897.567810] env[65503]: value = "task-4450222" [ 897.567810] env[65503]: _type = "Task" [ 897.567810] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.584221] env[65503]: DEBUG oslo_vmware.api [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450221, 'name': PowerOffVM_Task, 'duration_secs': 0.501767} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.587748] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 897.587883] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 897.588201] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450222, 'name': Rename_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.588410] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e300345e-d07a-4d9f-a706-6cbe5e4bbebf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.618655] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 897.669034] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 897.669034] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 897.669034] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleting the datastore file [datastore1] 1e92795e-cf30-4175-9e31-c29278f3e9e0 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 897.669034] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fdf8bb3-0d59-477d-9be7-244c5f5f8404 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.681040] env[65503]: DEBUG oslo_vmware.api [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 897.681040] env[65503]: value = "task-4450224" [ 897.681040] env[65503]: _type = "Task" [ 897.681040] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.692639] env[65503]: DEBUG oslo_vmware.api [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450224, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.795952] env[65503]: INFO nova.compute.manager [-] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Took 1.53 seconds to deallocate network for instance. 
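Editor's note: the terminate flow for instance 1e92795e-... above follows the usual vmwareapi destroy order: power the VM off, unregister it from the vCenter inventory, delete its datastore contents, then deallocate its Neutron resources. A compressed sketch of that ordering; the method names on the hypothetical session and network_api objects are placeholders, not the driver's real API:

    def destroy_instance(session, network_api, context, instance, vm_ref, ds_path):
        """Tear an instance down in the order the log shows:
        power off -> unregister -> delete files -> deallocate network."""
        try:
            session.power_off(vm_ref)           # PowerOffVM_Task
        except Exception:
            pass                                # tolerate an already powered-off VM
        session.unregister(vm_ref)              # VirtualMachine.UnregisterVM
        session.delete_datastore_path(ds_path)  # FileManager.DeleteDatastoreFile_Task
        network_api.deallocate_for_instance(context, instance)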
[ 898.026298] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802e45b8-5831-4bb3-99ba-66fd0e20e59c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.032087] env[65503]: DEBUG nova.compute.manager [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 898.034927] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4689afa4-f40a-4f7c-98c8-51741bcb3b12 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.040367] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa143a8b-cb54-467b-b6b8-d88d10c15fb4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.080151] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0448ae-bd32-400b-81c1-28431eaeca48 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.090103] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450222, 'name': Rename_Task, 'duration_secs': 0.191875} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.092147] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 898.092449] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-584a182b-c5fb-43a3-afc5-518c462f6e0d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.095128] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8259e6-a073-4856-9d99-912715b53542 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.114348] env[65503]: DEBUG nova.compute.provider_tree [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 898.120275] env[65503]: DEBUG oslo_vmware.api [None 
req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 898.120275] env[65503]: value = "task-4450225" [ 898.120275] env[65503]: _type = "Task" [ 898.120275] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.131092] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450225, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.193664] env[65503]: DEBUG oslo_vmware.api [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245873} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.194028] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 898.194242] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 898.194474] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 898.194702] env[65503]: INFO nova.compute.manager [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Took 1.15 seconds to destroy the instance on the hypervisor. [ 898.195008] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 898.195254] env[65503]: DEBUG nova.compute.manager [-] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 898.195372] env[65503]: DEBUG nova.network.neutron [-] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 898.197411] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 898.197411] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 898.197411] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 898.307391] env[65503]: DEBUG oslo_concurrency.lockutils [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.578168] env[65503]: INFO nova.compute.manager [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] instance snapshotting [ 898.581172] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6d3daa-ca7e-4e6d-a0d2-d2472ef3ed66 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.604270] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b2c20c-4c1e-4e3a-8a07-7c815aad1c60 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.636879] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 898.637483] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450225, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.643828] env[65503]: ERROR nova.scheduler.client.report [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [req-65e419e3-fd94-46e8-882e-fe1facdabf25] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-65e419e3-fd94-46e8-882e-fe1facdabf25"}]} [ 898.667737] env[65503]: DEBUG nova.scheduler.client.report [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 898.674568] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 898.674839] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 898.674996] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 898.675426] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 898.675606] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe 
tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 898.675755] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 898.676013] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.676133] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 898.676300] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 898.676463] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 898.676622] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 898.677818] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cd085a-4377-4d56-97c7-2f45e68b3d31 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.690438] env[65503]: DEBUG nova.scheduler.client.report [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 898.690680] env[65503]: DEBUG nova.compute.provider_tree [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 
tempest-MultipleCreateTestJSON-1754149764-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 898.693700] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3e7f4d-f17a-4e68-965e-e5c8a9094d5e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.712248] env[65503]: DEBUG nova.scheduler.client.report [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 898.734587] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 898.740478] env[65503]: DEBUG nova.scheduler.client.report [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 898.751264] env[65503]: WARNING openstack [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 898.751663] env[65503]: WARNING openstack [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 898.760361] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 898.760591] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870394', 'volume_id': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'name': 'volume-adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e4c1c94b-744f-4bed-8e68-3b3f9de7db44', 'attached_at': '', 'detached_at': '', 'volume_id': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'serial': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 898.762039] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93d8147-bb21-4009-b185-20a42ebef3ab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.779760] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dadfe51-b786-4265-b3cc-32f2ff7aac45 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.805731] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] volume-adb6e1a6-ba84-48df-9bdb-d884f47fcd90/volume-adb6e1a6-ba84-48df-9bdb-d884f47fcd90.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 898.808765] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4200a3c-8c97-4dfe-84a6-60ca0f373b8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.830592] env[65503]: DEBUG oslo_vmware.api [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 898.830592] env[65503]: value = "task-4450226" [ 898.830592] env[65503]: _type = "Task" [ 898.830592] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.843190] env[65503]: DEBUG oslo_vmware.api [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450226, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.080178] env[65503]: WARNING neutronclient.v2_0.client [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
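Editor's note: the 409 "placement.concurrent_update" error a few entries back is the expected outcome when two writers race on the same resource provider: every inventory PUT carries the provider generation, and Placement rejects a stale one. The report client then re-reads the provider (the "Refreshing inventories/aggregates/traits" lines above) and retries with the fresh generation, which is why the update later succeeds and bumps the generation from 96 to 97. A bare-bones sketch of that compare-and-swap loop; the get_provider/put_inventory callables and the Conflict exception are illustrative, not the real report-client API:

    class Conflict(Exception):
        """Raised by put_inventory() on a 409 generation conflict."""

    def set_inventory(get_provider, put_inventory, provider_uuid, inventory,
                      max_attempts=4):
        """PUT inventory with the current provider generation; on a 409,
        re-read the provider to pick up the new generation and try again."""
        for _ in range(max_attempts):
            generation = get_provider(provider_uuid)['generation']
            try:
                return put_inventory(provider_uuid, generation, inventory)
            except Conflict:
                continue  # another writer bumped the generation; refresh and retry
        raise Conflict('gave up after %d attempts' % max_attempts)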
[ 899.080928] env[65503]: WARNING openstack [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 899.081529] env[65503]: WARNING openstack [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 899.117260] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 899.117609] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e440efb0-60a3-4f8d-83f7-3197c3661bcc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.142133] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 899.142133] env[65503]: value = "task-4450227" [ 899.142133] env[65503]: _type = "Task" [ 899.142133] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.154359] env[65503]: DEBUG oslo_vmware.api [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450225, 'name': PowerOnVM_Task, 'duration_secs': 0.881492} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.156061] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.156061] env[65503]: DEBUG nova.compute.manager [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 899.156697] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2112d2b1-7b98-49cd-9384-71927fcffda5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.165607] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450227, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.199661] env[65503]: DEBUG nova.network.neutron [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Updating instance_info_cache with network_info: [{"id": "bf6fff36-e288-4f03-b705-1d02e2d90395", "address": "fa:16:3e:e9:41:4a", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf6fff36-e2", "ovs_interfaceid": "bf6fff36-e288-4f03-b705-1d02e2d90395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 899.248395] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c01b832-b3bf-459b-838b-af6a7a531f8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.261196] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5a1ea7-20d5-455d-9d41-047a102cee41 {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.296129] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d41760c-29d6-4523-a5ee-5e1d001ed2f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.300410] env[65503]: DEBUG nova.compute.manager [req-f5779904-e874-416c-9103-99c5db260dcf req-2f1005c3-8559-4f21-9850-f196867bf20b service nova] [instance: 429b7542-c288-4a7a-9032-09881938b256] Received event network-vif-deleted-c56d3579-9224-4ab6-8078-adc2f2c2803b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 899.300645] env[65503]: DEBUG nova.compute.manager [req-f5779904-e874-416c-9103-99c5db260dcf req-2f1005c3-8559-4f21-9850-f196867bf20b service nova] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Received event network-vif-deleted-3e858b14-8f22-4201-9e89-d017fcba1f2d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 899.308631] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf8ee95-6760-4473-947b-43c6a7e5caed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.327474] env[65503]: DEBUG nova.compute.provider_tree [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 899.344749] env[65503]: DEBUG oslo_vmware.api [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450226, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.480818] env[65503]: DEBUG nova.network.neutron [-] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 899.660712] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450227, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.691127] env[65503]: DEBUG oslo_concurrency.lockutils [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.703496] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.703496] env[65503]: WARNING neutronclient.v2_0.client [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 899.704240] env[65503]: WARNING openstack [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 899.704514] env[65503]: WARNING openstack [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 899.709530] env[65503]: WARNING neutronclient.v2_0.client [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 899.713080] env[65503]: DEBUG nova.network.neutron [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Successfully updated port: ba7e7de1-3fbb-49ea-889a-72f446d61a64 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 899.850595] env[65503]: DEBUG oslo_vmware.api [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450226, 'name': ReconfigVM_Task, 'duration_secs': 0.57224} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.851313] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Reconfigured VM instance instance-0000003c to attach disk [datastore2] volume-adb6e1a6-ba84-48df-9bdb-d884f47fcd90/volume-adb6e1a6-ba84-48df-9bdb-d884f47fcd90.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 899.856262] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97d510e6-fab2-4d20-965c-3b1d35da7edc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.875195] env[65503]: DEBUG oslo_vmware.api [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 899.875195] env[65503]: value = "task-4450228" [ 899.875195] env[65503]: _type = "Task" [ 899.875195] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.881371] env[65503]: DEBUG nova.scheduler.client.report [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 96 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 899.881697] env[65503]: DEBUG nova.compute.provider_tree [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 96 to 97 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 899.881924] env[65503]: DEBUG nova.compute.provider_tree [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 899.889036] env[65503]: DEBUG oslo_vmware.api [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450228, 'name': ReconfigVM_Task} 
progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.898291] env[65503]: DEBUG nova.compute.manager [req-ee831e23-968f-478d-bf37-71ce3c2b8a50 req-33ea3fd3-48db-4bae-8908-128760a37a39 service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Received event network-vif-plugged-ba7e7de1-3fbb-49ea-889a-72f446d61a64 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 899.898516] env[65503]: DEBUG oslo_concurrency.lockutils [req-ee831e23-968f-478d-bf37-71ce3c2b8a50 req-33ea3fd3-48db-4bae-8908-128760a37a39 service nova] Acquiring lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.899022] env[65503]: DEBUG oslo_concurrency.lockutils [req-ee831e23-968f-478d-bf37-71ce3c2b8a50 req-33ea3fd3-48db-4bae-8908-128760a37a39 service nova] Lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.899644] env[65503]: DEBUG oslo_concurrency.lockutils [req-ee831e23-968f-478d-bf37-71ce3c2b8a50 req-33ea3fd3-48db-4bae-8908-128760a37a39 service nova] Lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.900610] env[65503]: DEBUG nova.compute.manager [req-ee831e23-968f-478d-bf37-71ce3c2b8a50 req-33ea3fd3-48db-4bae-8908-128760a37a39 service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] No waiting events found dispatching network-vif-plugged-ba7e7de1-3fbb-49ea-889a-72f446d61a64 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 899.900610] env[65503]: WARNING nova.compute.manager [req-ee831e23-968f-478d-bf37-71ce3c2b8a50 req-33ea3fd3-48db-4bae-8908-128760a37a39 service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Received unexpected event network-vif-plugged-ba7e7de1-3fbb-49ea-889a-72f446d61a64 for instance with vm_state building and task_state spawning. [ 899.985564] env[65503]: INFO nova.compute.manager [-] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Took 1.79 seconds to deallocate network for instance. 
[ 900.074452] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 900.075389] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b726b53-8f29-4f2d-bd25-24a2bcdadeb9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.086073] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 900.086310] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5be4cda-07d0-46fa-87ae-31adb8479902 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.155238] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 900.155608] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 900.155775] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleting the datastore file [datastore1] 628e67fa-9a28-468f-85ad-990d3f1e5d8c {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 900.159208] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e3f85b1-6773-4840-b58f-9738c019ffac {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.161417] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450227, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.168096] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 900.168096] env[65503]: value = "task-4450230" [ 900.168096] env[65503]: _type = "Task" [ 900.168096] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.179956] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450230, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.191298] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquiring lock "dcc876ae-075f-48d2-81a4-a1b780d6fdec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.191419] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "dcc876ae-075f-48d2-81a4-a1b780d6fdec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.191938] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquiring lock "dcc876ae-075f-48d2-81a4-a1b780d6fdec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.191938] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "dcc876ae-075f-48d2-81a4-a1b780d6fdec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.192073] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "dcc876ae-075f-48d2-81a4-a1b780d6fdec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.194665] env[65503]: INFO nova.compute.manager [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Terminating instance [ 900.222802] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "refresh_cache-d7dd714a-a738-4d68-bbf0-32daf4a1c49b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.223094] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe 
tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "refresh_cache-d7dd714a-a738-4d68-bbf0-32daf4a1c49b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.223412] env[65503]: DEBUG nova.network.neutron [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 900.389918] env[65503]: DEBUG oslo_vmware.api [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450228, 'name': ReconfigVM_Task, 'duration_secs': 0.209092} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.390801] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.799s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.391415] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 900.394167] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870394', 'volume_id': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'name': 'volume-adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e4c1c94b-744f-4bed-8e68-3b3f9de7db44', 'attached_at': '', 'detached_at': '', 'volume_id': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'serial': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 900.395788] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.137s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.397879] env[65503]: INFO nova.compute.claims [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 900.494812] env[65503]: DEBUG oslo_concurrency.lockutils [None 
req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.659960] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquiring lock "f666b0a3-3679-456b-bc59-38107c299f80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.660419] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Lock "f666b0a3-3679-456b-bc59-38107c299f80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.667412] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450227, 'name': CreateSnapshot_Task, 'duration_secs': 1.043506} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.667827] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 900.668759] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5965ac17-046f-476d-b1bf-8b08ab108122 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.682206] env[65503]: DEBUG oslo_vmware.api [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.357584} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.686331] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 900.686670] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 900.686955] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 900.699746] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquiring lock "refresh_cache-dcc876ae-075f-48d2-81a4-a1b780d6fdec" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.699746] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquired lock "refresh_cache-dcc876ae-075f-48d2-81a4-a1b780d6fdec" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.699746] env[65503]: DEBUG nova.network.neutron [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 900.717120] env[65503]: INFO nova.scheduler.client.report [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted allocations for instance 628e67fa-9a28-468f-85ad-990d3f1e5d8c [ 900.727109] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 900.727109] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 900.772027] env[65503]: DEBUG nova.network.neutron [None 
req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 900.788124] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 900.788515] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 900.852492] env[65503]: WARNING neutronclient.v2_0.client [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 900.853411] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 900.853879] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 900.897844] env[65503]: DEBUG nova.compute.utils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 900.899334] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 900.899944] env[65503]: DEBUG nova.network.neutron [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 900.899944] env[65503]: WARNING neutronclient.v2_0.client [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 900.900217] env[65503]: WARNING neutronclient.v2_0.client [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 900.900734] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 900.901145] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 900.947390] env[65503]: DEBUG nova.policy [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffa6631776ba4f168d3d2a0168b858e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81064933e6fe4abf9c18f13d18c58037', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 900.967138] env[65503]: DEBUG nova.network.neutron [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Updating instance_info_cache with network_info: [{"id": "ba7e7de1-3fbb-49ea-889a-72f446d61a64", "address": "fa:16:3e:49:af:73", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7e7de1-3f", "ovs_interfaceid": "ba7e7de1-3fbb-49ea-889a-72f446d61a64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 901.163661] env[65503]: DEBUG nova.compute.manager [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 901.204765] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 901.205192] env[65503]: WARNING neutronclient.v2_0.client [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 901.206751] env[65503]: WARNING openstack [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 901.206751] env[65503]: WARNING openstack [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 901.214547] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7339d89c-e551-47da-821e-7428b99b9167 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.221892] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.224877] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 901.224877] env[65503]: value = "task-4450231" [ 901.224877] env[65503]: _type = "Task" [ 901.224877] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.239715] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450231, 'name': CloneVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.254065] env[65503]: DEBUG nova.network.neutron [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Successfully created port: 76dfa38d-18b0-4fa3-a4ac-7988e750f50a {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 901.259192] env[65503]: DEBUG nova.network.neutron [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 901.335322] env[65503]: DEBUG nova.network.neutron [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 901.409148] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 901.455436] env[65503]: DEBUG nova.objects.instance [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'flavor' on Instance uuid e4c1c94b-744f-4bed-8e68-3b3f9de7db44 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.471971] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "refresh_cache-d7dd714a-a738-4d68-bbf0-32daf4a1c49b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.472750] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Instance network_info: |[{"id": "ba7e7de1-3fbb-49ea-889a-72f446d61a64", "address": "fa:16:3e:49:af:73", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7e7de1-3f", "ovs_interfaceid": "ba7e7de1-3fbb-49ea-889a-72f446d61a64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 901.473538] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:af:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'd891a662-7da0-4be5-8d0d-01b4ff70552a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba7e7de1-3fbb-49ea-889a-72f446d61a64', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.482873] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 901.485118] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 901.485118] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ef6ec27-be65-44b5-9449-f50dd9493ff0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.516502] env[65503]: DEBUG nova.compute.manager [req-d772bc0c-d9da-435a-ba92-8d5b2ac94dfb req-360eb949-ef2f-4401-adf5-925fdbb76d4b service nova] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Received event network-vif-deleted-cde44ebc-2cc2-4df5-9886-d3766e7f4bb9 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 901.521656] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.521656] env[65503]: value = "task-4450232" [ 901.521656] env[65503]: _type = "Task" [ 901.521656] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.538577] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450232, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.686999] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.735616] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450231, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.840323] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Releasing lock "refresh_cache-dcc876ae-075f-48d2-81a4-a1b780d6fdec" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.840822] env[65503]: DEBUG nova.compute.manager [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 901.841043] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 901.842217] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23219375-7dcd-4e3f-b917-265f2f122bf0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.851178] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 901.852317] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad7ccff9-a0c4-49b9-b938-c0b14f7f9a8f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.854725] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc49516-cc3e-4cb6-94f4-94480a6c48db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.865124] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef54406-0fec-44ba-ab75-e85855cddc91 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.868926] env[65503]: DEBUG oslo_vmware.api [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 901.868926] env[65503]: value = "task-4450233" [ 901.868926] env[65503]: _type = "Task" [ 901.868926] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.903020] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d15396e-e5ab-4502-9881-e2f4fb8bb6dd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.905927] env[65503]: DEBUG oslo_vmware.api [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450233, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.921114] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291a05cc-89dc-4de8-8cff-b3fa4e4d68af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.941119] env[65503]: DEBUG nova.compute.provider_tree [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.960536] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1125ce06-3e24-43b2-a132-60e064481489 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.346s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.034914] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450232, 'name': CreateVM_Task, 'duration_secs': 0.474821} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.035236] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 902.035842] env[65503]: WARNING neutronclient.v2_0.client [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 902.036256] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.036414] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.036737] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 902.037348] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-570c7dfd-8252-4e60-93ee-4b686401a2d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.043282] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 902.043282] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5211fb8a-820b-b9cc-8d85-3f90dfc22b09" [ 902.043282] env[65503]: _type = "Task" [ 902.043282] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.051814] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5211fb8a-820b-b9cc-8d85-3f90dfc22b09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.109284] env[65503]: DEBUG nova.compute.manager [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Received event network-changed-ba7e7de1-3fbb-49ea-889a-72f446d61a64 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 902.109523] env[65503]: DEBUG nova.compute.manager [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Refreshing instance network info cache due to event network-changed-ba7e7de1-3fbb-49ea-889a-72f446d61a64. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 902.109737] env[65503]: DEBUG oslo_concurrency.lockutils [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Acquiring lock "refresh_cache-d7dd714a-a738-4d68-bbf0-32daf4a1c49b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.109875] env[65503]: DEBUG oslo_concurrency.lockutils [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Acquired lock "refresh_cache-d7dd714a-a738-4d68-bbf0-32daf4a1c49b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.110043] env[65503]: DEBUG nova.network.neutron [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Refreshing network info cache for port ba7e7de1-3fbb-49ea-889a-72f446d61a64 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 902.237114] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450231, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.380115] env[65503]: DEBUG oslo_vmware.api [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450233, 'name': PowerOffVM_Task, 'duration_secs': 0.158344} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.380476] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 902.380652] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 902.380922] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a47cbdc-3305-41c9-910e-12cbb8b67536 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.408569] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 902.408817] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Deleting contents of the VM from datastore datastore2 {{(pid=65503) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 902.408992] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Deleting the datastore file [datastore2] dcc876ae-075f-48d2-81a4-a1b780d6fdec {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.409306] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfd045e5-58ca-4cdc-a74e-3c127a4eb7e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.412046] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.412284] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.412448] env[65503]: DEBUG nova.compute.manager [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 902.413343] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021fb75b-e550-4cc1-a085-1477d4ef12e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.420756] env[65503]: DEBUG nova.compute.manager [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 902.421412] env[65503]: DEBUG nova.objects.instance [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'flavor' on Instance uuid e4c1c94b-744f-4bed-8e68-3b3f9de7db44 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 902.424489] env[65503]: DEBUG oslo_vmware.api [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for the task: (returnval){ [ 902.424489] env[65503]: value = "task-4450235" [ 902.424489] env[65503]: _type = "Task" [ 902.424489] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.429761] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 902.439059] env[65503]: DEBUG oslo_vmware.api [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450235, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.445302] env[65503]: DEBUG nova.scheduler.client.report [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 902.457907] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 902.458207] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 902.458364] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 902.458596] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Flavor pref 0:0:0 {{(pid=65503) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 902.458804] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 902.458999] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 902.459266] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 902.460315] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 902.460315] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 902.460315] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 902.460315] env[65503]: DEBUG nova.virt.hardware [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 902.461371] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566c8a5c-f200-4518-9586-7072b1b550bd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.472191] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ca5caf-0617-4c3b-ac43-d7fa66272aa6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.554174] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5211fb8a-820b-b9cc-8d85-3f90dfc22b09, 'name': SearchDatastore_Task, 'duration_secs': 0.03599} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.554482] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.554711] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.554941] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.555097] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.555366] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.555629] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6926e94b-1013-48cf-9945-46f7b1337545 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.574757] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.574956] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 902.575840] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aef919e6-5f0c-4500-8258-74fa2cde96e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.581890] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 902.581890] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5271f0bb-f731-2b5a-1bd0-4f597c6d3ba1" [ 902.581890] env[65503]: _type = "Task" [ 902.581890] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.590891] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5271f0bb-f731-2b5a-1bd0-4f597c6d3ba1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.612336] env[65503]: WARNING neutronclient.v2_0.client [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 902.613230] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 902.613620] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 902.738872] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450231, 'name': CloneVM_Task} progress is 95%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.806075] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 902.806451] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 902.815167] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.891951] env[65503]: DEBUG nova.network.neutron [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Successfully updated port: 76dfa38d-18b0-4fa3-a4ac-7988e750f50a {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 902.912091] env[65503]: WARNING neutronclient.v2_0.client [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 902.912549] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 902.913068] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 902.936283] env[65503]: DEBUG oslo_vmware.api [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Task: {'id': task-4450235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241558} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.936655] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 902.936815] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 902.936901] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 902.937071] env[65503]: INFO nova.compute.manager [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Took 1.10 seconds to destroy the instance on the hypervisor. [ 902.937310] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 902.937501] env[65503]: DEBUG nova.compute.manager [-] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 902.937649] env[65503]: DEBUG nova.network.neutron [-] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 902.937835] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 902.938373] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 902.938632] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 902.950380] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.951064] env[65503]: DEBUG nova.compute.manager [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 902.955196] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.051s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.957078] env[65503]: INFO nova.compute.claims [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 902.986551] env[65503]: DEBUG nova.network.neutron [-] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 902.986984] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 903.027679] env[65503]: DEBUG nova.network.neutron [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Updated VIF entry in instance network info cache for port ba7e7de1-3fbb-49ea-889a-72f446d61a64. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 903.027995] env[65503]: DEBUG nova.network.neutron [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Updating instance_info_cache with network_info: [{"id": "ba7e7de1-3fbb-49ea-889a-72f446d61a64", "address": "fa:16:3e:49:af:73", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7e7de1-3f", "ovs_interfaceid": "ba7e7de1-3fbb-49ea-889a-72f446d61a64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 903.092956] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5271f0bb-f731-2b5a-1bd0-4f597c6d3ba1, 'name': SearchDatastore_Task, 'duration_secs': 0.012748} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.093793] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aed6a305-709f-4781-a7c6-9b5ac7446385 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.100266] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 903.100266] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526fbe65-1d8d-da31-3cb5-5f30a4219b37" [ 903.100266] env[65503]: _type = "Task" [ 903.100266] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.110223] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526fbe65-1d8d-da31-3cb5-5f30a4219b37, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.240037] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450231, 'name': CloneVM_Task, 'duration_secs': 1.815675} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.240390] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Created linked-clone VM from snapshot [ 903.241200] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8a7cfe-2d6d-4d8c-b9ed-0db79d13cd53 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.250469] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Uploading image e2f971c6-aded-4d92-880a-1d895555c7ca {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 903.263275] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 903.263685] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5b05ff9d-866a-43fe-8d9b-8302facf9c13 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.273164] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 903.273164] env[65503]: value = "task-4450236" [ 903.273164] env[65503]: _type = "Task" [ 903.273164] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.285351] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450236, 'name': Destroy_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.395525] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "refresh_cache-236e651f-6a27-4601-8a76-ca1619e32dc6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.395981] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "refresh_cache-236e651f-6a27-4601-8a76-ca1619e32dc6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.396208] env[65503]: DEBUG nova.network.neutron [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 903.434192] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 903.434551] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0df4ee8c-c628-4f0d-9381-da36018636d9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.443094] env[65503]: DEBUG oslo_vmware.api [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 903.443094] env[65503]: value = "task-4450237" [ 903.443094] env[65503]: _type = "Task" [ 903.443094] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.454255] env[65503]: DEBUG oslo_vmware.api [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450237, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.456927] env[65503]: DEBUG nova.compute.utils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 903.458520] env[65503]: DEBUG nova.compute.manager [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 903.458764] env[65503]: DEBUG nova.network.neutron [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 903.459455] env[65503]: WARNING neutronclient.v2_0.client [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 903.459642] env[65503]: WARNING neutronclient.v2_0.client [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 903.460164] env[65503]: WARNING openstack [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 903.460575] env[65503]: WARNING openstack [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 903.489226] env[65503]: DEBUG nova.network.neutron [-] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 903.515196] env[65503]: DEBUG nova.policy [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e193d8d730e14c348b38c407f58cdc56', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34e8cd66745a40d2acebbce98050ee5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 903.531234] env[65503]: DEBUG nova.compute.manager [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Received event network-vif-plugged-76dfa38d-18b0-4fa3-a4ac-7988e750f50a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 903.531590] env[65503]: DEBUG oslo_concurrency.lockutils [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 
req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Acquiring lock "236e651f-6a27-4601-8a76-ca1619e32dc6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.531927] env[65503]: DEBUG oslo_concurrency.lockutils [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Lock "236e651f-6a27-4601-8a76-ca1619e32dc6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.532139] env[65503]: DEBUG oslo_concurrency.lockutils [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Lock "236e651f-6a27-4601-8a76-ca1619e32dc6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.532330] env[65503]: DEBUG nova.compute.manager [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] No waiting events found dispatching network-vif-plugged-76dfa38d-18b0-4fa3-a4ac-7988e750f50a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 903.532516] env[65503]: WARNING nova.compute.manager [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Received unexpected event network-vif-plugged-76dfa38d-18b0-4fa3-a4ac-7988e750f50a for instance with vm_state building and task_state spawning. [ 903.532686] env[65503]: DEBUG nova.compute.manager [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Received event network-changed-76dfa38d-18b0-4fa3-a4ac-7988e750f50a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 903.532986] env[65503]: DEBUG nova.compute.manager [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Refreshing instance network info cache due to event network-changed-76dfa38d-18b0-4fa3-a4ac-7988e750f50a. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 903.533290] env[65503]: DEBUG oslo_concurrency.lockutils [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Acquiring lock "refresh_cache-236e651f-6a27-4601-8a76-ca1619e32dc6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.533656] env[65503]: DEBUG oslo_concurrency.lockutils [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Releasing lock "refresh_cache-d7dd714a-a738-4d68-bbf0-32daf4a1c49b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.533891] env[65503]: DEBUG nova.compute.manager [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Received event network-vif-unplugged-bf6fff36-e288-4f03-b705-1d02e2d90395 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 903.534076] env[65503]: DEBUG oslo_concurrency.lockutils [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Acquiring lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.534282] env[65503]: DEBUG oslo_concurrency.lockutils [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.534479] env[65503]: DEBUG oslo_concurrency.lockutils [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.534591] env[65503]: DEBUG nova.compute.manager [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] No waiting events found dispatching network-vif-unplugged-bf6fff36-e288-4f03-b705-1d02e2d90395 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 903.534755] env[65503]: WARNING nova.compute.manager [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Received unexpected event network-vif-unplugged-bf6fff36-e288-4f03-b705-1d02e2d90395 for instance with vm_state shelved_offloaded and task_state None. 
[ 903.534916] env[65503]: DEBUG nova.compute.manager [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Received event network-changed-bf6fff36-e288-4f03-b705-1d02e2d90395 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 903.535077] env[65503]: DEBUG nova.compute.manager [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Refreshing instance network info cache due to event network-changed-bf6fff36-e288-4f03-b705-1d02e2d90395. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 903.535342] env[65503]: DEBUG oslo_concurrency.lockutils [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Acquiring lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.535500] env[65503]: DEBUG oslo_concurrency.lockutils [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Acquired lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.535655] env[65503]: DEBUG nova.network.neutron [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Refreshing network info cache for port bf6fff36-e288-4f03-b705-1d02e2d90395 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 903.613950] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526fbe65-1d8d-da31-3cb5-5f30a4219b37, 'name': SearchDatastore_Task, 'duration_secs': 0.015664} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.614212] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.614476] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] d7dd714a-a738-4d68-bbf0-32daf4a1c49b/d7dd714a-a738-4d68-bbf0-32daf4a1c49b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 903.614880] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-369c81bb-b542-4772-a415-1e06d75b2a21 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.623461] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 903.623461] env[65503]: value = "task-4450238" [ 903.623461] env[65503]: _type = "Task" [ 903.623461] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.632287] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450238, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.790060] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450236, 'name': Destroy_Task, 'duration_secs': 0.492847} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.790060] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Destroyed the VM [ 903.790060] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 903.790060] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c86f84e1-6c8d-4887-8918-ee9eaf576db7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.795778] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 903.795778] env[65503]: value = "task-4450239" [ 903.795778] env[65503]: _type = "Task" [ 903.795778] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.809934] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450239, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.876279] env[65503]: DEBUG nova.network.neutron [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Successfully created port: c1eaff02-bbf6-47d2-9655-443ca8021c5a {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 903.899928] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 903.900285] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 903.955230] env[65503]: DEBUG oslo_vmware.api [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450237, 'name': PowerOffVM_Task, 'duration_secs': 0.302497} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.955230] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 903.955230] env[65503]: DEBUG nova.compute.manager [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 903.956052] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4631b1c-0741-42a1-a090-1a4f2e311371 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.966640] env[65503]: DEBUG nova.network.neutron [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 903.969751] env[65503]: DEBUG nova.compute.manager [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 903.992401] env[65503]: INFO nova.compute.manager [-] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Took 1.05 seconds to deallocate network for instance. [ 904.038592] env[65503]: WARNING neutronclient.v2_0.client [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 904.039360] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 904.039816] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 904.084947] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 904.085864] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 904.135158] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450238, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.270785] env[65503]: WARNING neutronclient.v2_0.client [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 904.271621] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 904.272066] env[65503]: WARNING openstack [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 904.307450] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450239, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.317618] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 904.318136] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 904.423205] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babb92dd-4cf2-4738-9df1-ea54712fe8ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.433457] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2722db95-0e18-429c-989b-a9eb3425bc89 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.479439] env[65503]: DEBUG nova.network.neutron [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Updating instance_info_cache with network_info: [{"id": "76dfa38d-18b0-4fa3-a4ac-7988e750f50a", "address": "fa:16:3e:dc:65:48", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76dfa38d-18", "ovs_interfaceid": "76dfa38d-18b0-4fa3-a4ac-7988e750f50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 904.486484] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d098117-66c0-441f-b973-3b6fa0f935b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.492443] env[65503]: DEBUG oslo_concurrency.lockutils [None req-07b55b40-74e5-42e2-8928-68cfeb757787 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.080s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.499953] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.501486] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fed9f9-de42-43ba-87ef-a1ede0ef8954 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.518475] env[65503]: DEBUG nova.compute.provider_tree [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.542048] env[65503]: WARNING neutronclient.v2_0.client [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 904.542733] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 904.543126] env[65503]: WARNING openstack [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 904.635635] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450238, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520291} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.635903] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] d7dd714a-a738-4d68-bbf0-32daf4a1c49b/d7dd714a-a738-4d68-bbf0-32daf4a1c49b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 904.636143] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 904.636412] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e1aaa9d-f4cc-4c7e-abcc-0d0ebeec2116 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.645969] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 904.645969] env[65503]: value = "task-4450240" [ 904.645969] env[65503]: _type = "Task" [ 904.645969] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.656232] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450240, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.696395] env[65503]: DEBUG nova.network.neutron [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Updated VIF entry in instance network info cache for port bf6fff36-e288-4f03-b705-1d02e2d90395. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 904.696970] env[65503]: DEBUG nova.network.neutron [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Updating instance_info_cache with network_info: [{"id": "bf6fff36-e288-4f03-b705-1d02e2d90395", "address": "fa:16:3e:e9:41:4a", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": null, "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapbf6fff36-e2", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 904.810046] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450239, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.993426] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "refresh_cache-236e651f-6a27-4601-8a76-ca1619e32dc6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.993867] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Instance network_info: |[{"id": "76dfa38d-18b0-4fa3-a4ac-7988e750f50a", "address": "fa:16:3e:dc:65:48", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76dfa38d-18", "ovs_interfaceid": "76dfa38d-18b0-4fa3-a4ac-7988e750f50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 904.994285] env[65503]: DEBUG oslo_concurrency.lockutils [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Acquired lock "refresh_cache-236e651f-6a27-4601-8a76-ca1619e32dc6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.994463] env[65503]: DEBUG nova.network.neutron [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Refreshing network info cache for port 76dfa38d-18b0-4fa3-a4ac-7988e750f50a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 904.995792] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:65:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd891a662-7da0-4be5-8d0d-01b4ff70552a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76dfa38d-18b0-4fa3-a4ac-7988e750f50a', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.003451] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 905.005414] env[65503]: DEBUG nova.compute.manager [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 905.007662] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 905.009449] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2aee92ed-a944-47f8-bb94-04cd0b87ebe8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.026180] env[65503]: DEBUG nova.scheduler.client.report [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 905.038371] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.038371] env[65503]: value = "task-4450241" [ 905.038371] env[65503]: _type = "Task" [ 905.038371] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.046334] env[65503]: DEBUG nova.virt.hardware [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 905.046686] env[65503]: DEBUG nova.virt.hardware [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 905.046950] env[65503]: DEBUG nova.virt.hardware [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 905.047200] env[65503]: DEBUG nova.virt.hardware [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Flavor pref 0:0:0 
{{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 905.047416] env[65503]: DEBUG nova.virt.hardware [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 905.047632] env[65503]: DEBUG nova.virt.hardware [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 905.047924] env[65503]: DEBUG nova.virt.hardware [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 905.048168] env[65503]: DEBUG nova.virt.hardware [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 905.048439] env[65503]: DEBUG nova.virt.hardware [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 905.048674] env[65503]: DEBUG nova.virt.hardware [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 905.048958] env[65503]: DEBUG nova.virt.hardware [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 905.054018] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05aaf2a-a05f-4ee8-abb7-f957a4c9b5ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.061127] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450241, 'name': CreateVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.064656] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3adaba82-b620-4861-81cf-5e74a7984980 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.157605] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450240, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.49714} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.158042] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 905.158690] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5771f6-8126-4541-8c6d-cbce52cafa25 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.186912] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] d7dd714a-a738-4d68-bbf0-32daf4a1c49b/d7dd714a-a738-4d68-bbf0-32daf4a1c49b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.188059] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c677c51c-ad77-4fc2-8709-b041107ec367 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.206763] env[65503]: DEBUG oslo_concurrency.lockutils [req-14d0e4f8-abea-4c28-91bd-068473be744e req-f132c732-e606-4af3-9d09-4f79b355dd63 service nova] Releasing lock "refresh_cache-628e67fa-9a28-468f-85ad-990d3f1e5d8c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.213964] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 905.213964] env[65503]: value = "task-4450242" [ 905.213964] env[65503]: _type = "Task" [ 905.213964] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.225102] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450242, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.308876] env[65503]: DEBUG oslo_vmware.api [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450239, 'name': RemoveSnapshot_Task, 'duration_secs': 1.414112} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.309241] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 905.509464] env[65503]: WARNING neutronclient.v2_0.client [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 905.509464] env[65503]: WARNING openstack [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 905.509464] env[65503]: WARNING openstack [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 905.517793] env[65503]: DEBUG nova.network.neutron [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Successfully updated port: c1eaff02-bbf6-47d2-9655-443ca8021c5a {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 905.534721] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.535485] env[65503]: DEBUG nova.compute.manager [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 905.537754] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.029s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.539207] env[65503]: INFO nova.compute.claims [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.556991] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450241, 'name': CreateVM_Task, 'duration_secs': 0.406243} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.557252] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 905.557743] env[65503]: WARNING neutronclient.v2_0.client [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 905.558871] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.559912] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.559912] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 905.559912] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-328d96e4-7018-4a68-b6c8-9edef466d412 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.566929] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 905.566929] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f76997-1a25-78c7-7ab4-d3fbaa74d1a3" [ 905.566929] env[65503]: _type = "Task" [ 
905.566929] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.577985] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f76997-1a25-78c7-7ab4-d3fbaa74d1a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.596289] env[65503]: DEBUG nova.compute.manager [req-3bf18325-570b-42f2-8710-064dc251bcd2 req-ca2b9f95-5d3f-41c9-9e16-86ce2c7814c6 service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Received event network-vif-plugged-c1eaff02-bbf6-47d2-9655-443ca8021c5a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 905.596700] env[65503]: DEBUG oslo_concurrency.lockutils [req-3bf18325-570b-42f2-8710-064dc251bcd2 req-ca2b9f95-5d3f-41c9-9e16-86ce2c7814c6 service nova] Acquiring lock "909b3535-9410-4820-a34d-6c0e9627f506-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.596796] env[65503]: DEBUG oslo_concurrency.lockutils [req-3bf18325-570b-42f2-8710-064dc251bcd2 req-ca2b9f95-5d3f-41c9-9e16-86ce2c7814c6 service nova] Lock "909b3535-9410-4820-a34d-6c0e9627f506-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.596932] env[65503]: DEBUG oslo_concurrency.lockutils [req-3bf18325-570b-42f2-8710-064dc251bcd2 req-ca2b9f95-5d3f-41c9-9e16-86ce2c7814c6 service nova] Lock "909b3535-9410-4820-a34d-6c0e9627f506-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.597146] env[65503]: DEBUG nova.compute.manager [req-3bf18325-570b-42f2-8710-064dc251bcd2 req-ca2b9f95-5d3f-41c9-9e16-86ce2c7814c6 service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] No waiting events found dispatching network-vif-plugged-c1eaff02-bbf6-47d2-9655-443ca8021c5a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 905.597333] env[65503]: WARNING nova.compute.manager [req-3bf18325-570b-42f2-8710-064dc251bcd2 req-ca2b9f95-5d3f-41c9-9e16-86ce2c7814c6 service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Received unexpected event network-vif-plugged-c1eaff02-bbf6-47d2-9655-443ca8021c5a for instance with vm_state building and task_state spawning. 
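Every vCenter task in this log follows the same shape: a "Waiting for the task ... to complete" entry, zero or more "progress is N%" polls, then "completed successfully". A generic sketch of that poll-until-done loop (illustration only; oslo.vmware's real wait_for_task is built on its own session and looping-call machinery):

    # Sketch of the poll loop behind the "Waiting for the task ... to
    # complete" / "progress is N%" / "completed successfully" entries.
    # Generic illustration, not oslo.vmware's implementation.
    import time

    def wait_for_task(poll_task, interval=0.5, timeout=300.0):
        """poll_task() -> (state, progress); state is 'running',
        'success' or 'error', mirroring the task states in the log."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = poll_task()
            print(f"Task progress is {progress}%.")
            if state == 'success':
                print("Task completed successfully.")
                return
            if state == 'error':
                raise RuntimeError("task failed")
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")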
[ 905.617423] env[65503]: DEBUG nova.objects.instance [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'flavor' on Instance uuid e4c1c94b-744f-4bed-8e68-3b3f9de7db44 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.633151] env[65503]: WARNING openstack [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 905.633554] env[65503]: WARNING openstack [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 905.715463] env[65503]: WARNING neutronclient.v2_0.client [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 905.716201] env[65503]: WARNING openstack [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 905.716602] env[65503]: WARNING openstack [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 905.734525] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450242, 'name': ReconfigVM_Task, 'duration_secs': 0.451997} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.734804] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Reconfigured VM instance instance-00000048 to attach disk [datastore2] d7dd714a-a738-4d68-bbf0-32daf4a1c49b/d7dd714a-a738-4d68-bbf0-32daf4a1c49b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 905.735470] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f02722a-6ae3-490a-9930-e801da399ad8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.746927] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 905.746927] env[65503]: value = "task-4450243" [ 905.746927] env[65503]: _type = "Task" [ 905.746927] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.754315] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450243, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.804658] env[65503]: DEBUG nova.network.neutron [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Updated VIF entry in instance network info cache for port 76dfa38d-18b0-4fa3-a4ac-7988e750f50a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 905.806453] env[65503]: DEBUG nova.network.neutron [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Updating instance_info_cache with network_info: [{"id": "76dfa38d-18b0-4fa3-a4ac-7988e750f50a", "address": "fa:16:3e:dc:65:48", "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-998795973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81064933e6fe4abf9c18f13d18c58037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76dfa38d-18", "ovs_interfaceid": "76dfa38d-18b0-4fa3-a4ac-7988e750f50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 905.816583] env[65503]: WARNING nova.compute.manager [None req-6b54bf38-f8e3-49b5-800f-3c8cb074f742 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Image not found during snapshot: nova.exception.ImageNotFound: Image e2f971c6-aded-4d92-880a-1d895555c7ca could not be found. 
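The "Updating instance_info_cache with network_info: [...]" entries above carry the full port/subnet model as JSON. A small sketch that pulls out the fields most often needed when reading these entries (port id, MAC, device name, segmentation id, fixed IPs), using a trimmed copy of the 76dfa38d-... blob from this log:

    # Sketch: extracting the commonly needed fields from a network_info
    # cache entry like the ones logged above. The sample is a trimmed
    # copy of the 76dfa38d-... entry from this log.
    import json

    sample = json.loads("""
    [{"id": "76dfa38d-18b0-4fa3-a4ac-7988e750f50a",
      "address": "fa:16:3e:dc:65:48",
      "network": {"id": "1f570b57-1d81-401e-860f-08e71eec4971",
                  "bridge": "br-int",
                  "subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.4",
                                        "type": "fixed"}]}]},
      "type": "ovs",
      "details": {"segmentation_id": 815},
      "devname": "tap76dfa38d-18",
      "active": true}]
    """)

    for vif in sample:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(vif["id"], vif["address"], vif["devname"],
              vif["details"].get("segmentation_id"), ips)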
[ 906.020904] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "refresh_cache-909b3535-9410-4820-a34d-6c0e9627f506" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.021082] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "refresh_cache-909b3535-9410-4820-a34d-6c0e9627f506" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.021276] env[65503]: DEBUG nova.network.neutron [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 906.048752] env[65503]: DEBUG nova.compute.utils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 906.052810] env[65503]: DEBUG nova.compute.manager [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Not allocating networking since 'none' was specified. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 906.078216] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f76997-1a25-78c7-7ab4-d3fbaa74d1a3, 'name': SearchDatastore_Task, 'duration_secs': 0.011452} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.078522] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.078783] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.078979] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.079134] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.079309] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.079576] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcefbf48-2048-4c5f-8645-c0e34439e8f1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.090536] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.090698] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 906.091698] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9da6eca0-8da7-4f1a-a80e-16b30a8385f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.101197] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 906.101197] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528c04a0-78c5-d64e-df34-dbdb6a2761a7" [ 906.101197] env[65503]: _type = "Task" [ 906.101197] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.111941] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528c04a0-78c5-d64e-df34-dbdb6a2761a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.123201] env[65503]: DEBUG oslo_concurrency.lockutils [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.123373] env[65503]: DEBUG oslo_concurrency.lockutils [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquired lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.123542] env[65503]: DEBUG nova.network.neutron [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 906.123728] env[65503]: DEBUG nova.objects.instance [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'info_cache' on Instance uuid e4c1c94b-744f-4bed-8e68-3b3f9de7db44 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.259967] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450243, 'name': Rename_Task, 'duration_secs': 0.14018} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.260579] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 906.261221] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b407a6c-91fa-4c48-b8a9-91075cb96bba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.270011] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 906.270011] env[65503]: value = "task-4450244" [ 906.270011] env[65503]: _type = "Task" [ 906.270011] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.280521] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450244, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.310952] env[65503]: DEBUG oslo_concurrency.lockutils [req-ab4ebc1a-8834-4f83-a37a-0031d14e46f5 req-f2920c75-820b-4546-a7fd-c53f18e389e2 service nova] Releasing lock "refresh_cache-236e651f-6a27-4601-8a76-ca1619e32dc6" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.525228] env[65503]: WARNING openstack [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 906.525635] env[65503]: WARNING openstack [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 906.553735] env[65503]: DEBUG nova.compute.manager [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 906.568558] env[65503]: DEBUG nova.network.neutron [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 906.590699] env[65503]: WARNING openstack [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 906.590977] env[65503]: WARNING openstack [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 906.598324] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "bc0c0066-b672-4385-8d68-c14e3635af4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.598557] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "bc0c0066-b672-4385-8d68-c14e3635af4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.598749] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "bc0c0066-b672-4385-8d68-c14e3635af4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.598917] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "bc0c0066-b672-4385-8d68-c14e3635af4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.599086] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "bc0c0066-b672-4385-8d68-c14e3635af4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.604931] env[65503]: INFO nova.compute.manager [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 
tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Terminating instance [ 906.621981] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528c04a0-78c5-d64e-df34-dbdb6a2761a7, 'name': SearchDatastore_Task, 'duration_secs': 0.01092} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.625767] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-515b1303-f4ef-432d-8a9f-bf211784a24c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.629295] env[65503]: DEBUG nova.objects.base [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 906.638577] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 906.638577] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5286ebe4-0389-0727-ad16-731bcc464931" [ 906.638577] env[65503]: _type = "Task" [ 906.638577] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.650435] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5286ebe4-0389-0727-ad16-731bcc464931, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.650646] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.650923] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 236e651f-6a27-4601-8a76-ca1619e32dc6/236e651f-6a27-4601-8a76-ca1619e32dc6.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 906.651456] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb614661-a45b-4329-96ab-bbd622dc7086 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.659408] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 906.659408] env[65503]: value = "task-4450245" [ 906.659408] env[65503]: _type = "Task" [ 906.659408] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.670759] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450245, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.686480] env[65503]: WARNING neutronclient.v2_0.client [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 906.687182] env[65503]: WARNING openstack [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 906.687634] env[65503]: WARNING openstack [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 906.781376] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450244, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.811993] env[65503]: DEBUG nova.network.neutron [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Updating instance_info_cache with network_info: [{"id": "c1eaff02-bbf6-47d2-9655-443ca8021c5a", "address": "fa:16:3e:6e:8d:d9", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1eaff02-bb", "ovs_interfaceid": "c1eaff02-bbf6-47d2-9655-443ca8021c5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 906.969010] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2e0741-78aa-4673-b449-30a44f3097fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.979615] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209bbe64-796d-47fc-86e9-52d476596cd6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.018432] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1b73abdc-fda2-41eb-a803-52612cb6d03a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.028069] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc01af63-3ba6-422e-aaaf-399a26b70d70 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.048944] env[65503]: DEBUG nova.compute.provider_tree [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.114047] env[65503]: DEBUG nova.compute.manager [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 907.114047] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.115367] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54559ad-3a68-4331-821b-f79d53d68ef9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.124404] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.124697] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d6da356-50c3-41fa-8eff-386e488f1184 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.132019] env[65503]: WARNING neutronclient.v2_0.client [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 907.132828] env[65503]: WARNING openstack [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 907.133235] env[65503]: WARNING openstack [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 907.142715] env[65503]: DEBUG oslo_vmware.api [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 907.142715] env[65503]: value = "task-4450246" [ 907.142715] env[65503]: _type = "Task" [ 907.142715] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.152686] env[65503]: DEBUG oslo_vmware.api [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450246, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.170577] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450245, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.285637] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450244, 'name': PowerOnVM_Task, 'duration_secs': 0.714808} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.285949] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.286210] env[65503]: INFO nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Took 8.65 seconds to spawn the instance on the hypervisor. 
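The paired "Disabling service" warnings that recur throughout this run are openstacksdk reacting to an oslo_config.cfg.NoSuchOptError: the [cinder] and [barbican] groups exist in the service configuration, but no valid_interfaces option is registered under them. A small sketch with oslo.config showing how that error arises and how registering the option satisfies the lookup (the default values here are illustrative, not the exact change this deployment needs):

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))   # group exists ...
    conf([], project='example')                   # ... but holds no options yet

    try:
        conf.cinder.valid_interfaces              # option was never registered
    except cfg.NoSuchOptError as exc:
        print(exc)   # prints something like: no such option valid_interfaces in group [cinder]

    # Registering the option (a keystoneauth-style interface list) fixes the lookup.
    conf.register_opts(
        [cfg.ListOpt('valid_interfaces', default=['internal', 'public'])],
        group='cinder')
    print(conf.cinder.valid_interfaces)           # ['internal', 'public']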
[ 907.287983] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 907.287983] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72965d8-effa-4303-a676-8823ea7c506f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.291425] env[65503]: WARNING openstack [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 907.291805] env[65503]: WARNING openstack [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 907.315234] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "refresh_cache-909b3535-9410-4820-a34d-6c0e9627f506" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.315580] env[65503]: DEBUG nova.compute.manager [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Instance network_info: |[{"id": "c1eaff02-bbf6-47d2-9655-443ca8021c5a", "address": "fa:16:3e:6e:8d:d9", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1eaff02-bb", "ovs_interfaceid": "c1eaff02-bbf6-47d2-9655-443ca8021c5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 907.316061] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None 
req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:8d:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1eaff02-bbf6-47d2-9655-443ca8021c5a', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 907.326877] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 907.327171] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 907.327452] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3c43530-9d8d-4091-bef7-b5234eed3245 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.356965] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 907.356965] env[65503]: value = "task-4450247" [ 907.356965] env[65503]: _type = "Task" [ 907.356965] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.367071] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.388667] env[65503]: WARNING neutronclient.v2_0.client [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
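The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" lines come from oslo.service's looping-call module, which the driver relies on while waiting for helpers like create_vm. One common pattern from that module is FixedIntervalLoopingCall, sketched below with an illustrative callable standing in for the wrapped Nova function (the interval and stop condition are assumptions for the example):

    from oslo_service import loopingcall

    attempts = {"n": 0}

    def _poll_for_result():
        """Illustrative unit of work run on each tick of the loop."""
        attempts["n"] += 1
        if attempts["n"] >= 3:
            # Raising LoopingCallDone stops the loop and hands back a value.
            raise loopingcall.LoopingCallDone(retvalue="created")

    timer = loopingcall.FixedIntervalLoopingCall(_poll_for_result)
    result = timer.start(interval=0.1).wait()   # blocks until LoopingCallDone
    print(result)                               # "created"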
[ 907.389454] env[65503]: WARNING openstack [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 907.389454] env[65503]: WARNING openstack [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 907.487381] env[65503]: DEBUG nova.network.neutron [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Updating instance_info_cache with network_info: [{"id": "12c029b6-d630-419b-8167-53eb6612a069", "address": "fa:16:3e:47:e5:1b", "network": {"id": "4abe123f-4f80-42e1-8dd0-fcf69e26c177", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-424144792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19e0e62fe31a46fc802dbfc625ac7645", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12c029b6-d6", "ovs_interfaceid": "12c029b6-d630-419b-8167-53eb6612a069", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 907.554363] env[65503]: DEBUG nova.scheduler.client.report [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 907.566282] env[65503]: DEBUG nova.compute.manager [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 
0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 907.594822] env[65503]: DEBUG nova.virt.hardware [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 907.595117] env[65503]: DEBUG nova.virt.hardware [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 907.595299] env[65503]: DEBUG nova.virt.hardware [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 907.595534] env[65503]: DEBUG nova.virt.hardware [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 907.595688] env[65503]: DEBUG nova.virt.hardware [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 907.595865] env[65503]: DEBUG nova.virt.hardware [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 907.596099] env[65503]: DEBUG nova.virt.hardware [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 907.596258] env[65503]: DEBUG nova.virt.hardware [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 907.596476] env[65503]: DEBUG nova.virt.hardware [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 907.596598] env[65503]: DEBUG nova.virt.hardware [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 907.596839] env[65503]: DEBUG nova.virt.hardware [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 907.598258] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037aca11-1316-4c08-a500-e0faeeb18bdd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.608148] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ea0ca6-074d-4fda-a073-3d6d456424f6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.625466] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 907.631306] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Creating folder: Project (cf7031e21a544fe4bfd7d85a79f5e5d8). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 907.631777] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c83ee463-263b-439a-a48a-9f7989556df7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.644478] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Created folder: Project (cf7031e21a544fe4bfd7d85a79f5e5d8) in parent group-v870190. [ 907.644606] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Creating folder: Instances. Parent ref: group-v870400. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 907.647921] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85a6658b-4eb5-42f6-90dd-2fb1d969fbce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.655531] env[65503]: DEBUG oslo_vmware.api [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450246, 'name': PowerOffVM_Task, 'duration_secs': 0.218787} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.655808] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.655948] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.656195] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a74f8f99-d01d-4e6a-9cc9-4a1ccf42c43c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.660579] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Created folder: Instances in parent group-v870400. [ 907.661019] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 907.663889] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 907.664147] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eed5663c-a775-4cc2-96a0-d463d2db6e2f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.679283] env[65503]: DEBUG nova.compute.manager [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Received event network-changed-c1eaff02-bbf6-47d2-9655-443ca8021c5a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 907.679532] env[65503]: DEBUG nova.compute.manager [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Refreshing instance network info cache due to event network-changed-c1eaff02-bbf6-47d2-9655-443ca8021c5a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 907.679773] env[65503]: DEBUG oslo_concurrency.lockutils [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] Acquiring lock "refresh_cache-909b3535-9410-4820-a34d-6c0e9627f506" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.679918] env[65503]: DEBUG oslo_concurrency.lockutils [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] Acquired lock "refresh_cache-909b3535-9410-4820-a34d-6c0e9627f506" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.680116] env[65503]: DEBUG nova.network.neutron [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Refreshing network info cache for port c1eaff02-bbf6-47d2-9655-443ca8021c5a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 907.686480] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450245, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525403} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.686782] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 236e651f-6a27-4601-8a76-ca1619e32dc6/236e651f-6a27-4601-8a76-ca1619e32dc6.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 907.686994] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 907.687268] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e74efc5-89c1-4364-aea7-a712169219cc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.693193] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 907.693193] env[65503]: value = "task-4450251" [ 907.693193] env[65503]: _type = "Task" [ 907.693193] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.700031] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 907.700031] env[65503]: value = "task-4450252" [ 907.700031] env[65503]: _type = "Task" [ 907.700031] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.707167] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450251, 'name': CreateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.712726] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450252, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.725095] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.725095] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.725095] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Deleting the datastore file [datastore1] bc0c0066-b672-4385-8d68-c14e3635af4e {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.725427] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-090c74b3-bd39-4354-a91a-16be26ef412e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.733625] env[65503]: DEBUG oslo_vmware.api [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 907.733625] env[65503]: value = "task-4450253" [ 907.733625] env[65503]: _type = "Task" [ 907.733625] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.742588] env[65503]: DEBUG oslo_vmware.api [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450253, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.817075] env[65503]: INFO nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Took 41.05 seconds to build instance. [ 907.868516] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.991744] env[65503]: DEBUG oslo_concurrency.lockutils [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Releasing lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.060095] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.522s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.060693] env[65503]: DEBUG nova.compute.manager [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 908.063793] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.584s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.064033] env[65503]: DEBUG nova.objects.instance [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Lazy-loading 'resources' on Instance uuid 38e9a714-87f8-422c-9cc5-09b6aec76198 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.183560] env[65503]: WARNING neutronclient.v2_0.client [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 908.184420] env[65503]: WARNING openstack [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 908.184706] env[65503]: WARNING openstack [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 908.210017] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450251, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.216594] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450252, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.179788} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.216932] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 908.217786] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5592710-7193-4e10-87c6-d37dbf9d9834 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.243732] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 236e651f-6a27-4601-8a76-ca1619e32dc6/236e651f-6a27-4601-8a76-ca1619e32dc6.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.249725] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e1152f4-7e32-46e5-bb57-e83cfc400af4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.271054] env[65503]: DEBUG oslo_vmware.api [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145816} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.272475] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.272685] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.272868] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.273087] env[65503]: INFO nova.compute.manager [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Took 1.16 seconds to destroy the instance on the hypervisor. [ 908.273374] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 908.273644] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 908.273644] env[65503]: value = "task-4450254" [ 908.273644] env[65503]: _type = "Task" [ 908.273644] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.273831] env[65503]: DEBUG nova.compute.manager [-] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 908.274120] env[65503]: DEBUG nova.network.neutron [-] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 908.274445] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 908.275160] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 908.275356] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 908.294708] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450254, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.318469] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.565s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.320666] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 908.368595] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.405596] env[65503]: WARNING openstack [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 908.405951] env[65503]: WARNING openstack [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 908.492548] env[65503]: WARNING neutronclient.v2_0.client [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
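The lock lines throughout the run ('acquired ... waited', '"released" ... held 42.565s') come from oslo.concurrency's lockutils, which Nova uses to serialize work per instance UUID and per shared resource such as the image cache and the network info cache. A short sketch of the two usual forms, the synchronized decorator and the lock context manager (the lock names below are illustrative):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('build-and-run-demo')
    def build_instance(uuid):
        # Only one thread at a time runs this body for the given lock name;
        # lockutils emits the acquired/waited and released/held debug lines.
        print('building %s' % uuid)

    def refresh_cache(uuid):
        with lockutils.lock('refresh_cache-%s' % uuid):
            print('refreshing network info cache for %s' % uuid)

    build_instance('d7dd714a-a738-4d68-bbf0-32daf4a1c49b')
    refresh_cache('909b3535-9410-4820-a34d-6c0e9627f506')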
[ 908.493675] env[65503]: WARNING openstack [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 908.493885] env[65503]: WARNING openstack [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 908.567463] env[65503]: DEBUG nova.compute.utils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 908.572494] env[65503]: DEBUG nova.compute.manager [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 908.572802] env[65503]: DEBUG nova.network.neutron [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 908.573141] env[65503]: WARNING neutronclient.v2_0.client [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 908.573572] env[65503]: WARNING neutronclient.v2_0.client [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
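The recurring neutronclient warning recommends moving to openstacksdk for Neutron calls such as the port lookups behind these network info cache refreshes. For reference, a hedged sketch of the equivalent SDK call (the cloud name and the device_id filter value are assumptions for illustration, not values taken from this deployment's config):

    import openstack

    # Cloud name must match an entry in clouds.yaml.
    conn = openstack.connect(cloud='devstack')

    # List the ports attached to one instance, roughly the data the
    # network info cache refresh needs from Neutron.
    for port in conn.network.ports(device_id='909b3535-9410-4820-a34d-6c0e9627f506'):
        print(port.id, port.mac_address,
              [ip['ip_address'] for ip in port.fixed_ips])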
[ 908.574315] env[65503]: WARNING openstack [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 908.574676] env[65503]: WARNING openstack [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 908.602687] env[65503]: DEBUG nova.network.neutron [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Updated VIF entry in instance network info cache for port c1eaff02-bbf6-47d2-9655-443ca8021c5a. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 908.603109] env[65503]: DEBUG nova.network.neutron [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Updating instance_info_cache with network_info: [{"id": "c1eaff02-bbf6-47d2-9655-443ca8021c5a", "address": "fa:16:3e:6e:8d:d9", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1eaff02-bb", "ovs_interfaceid": "c1eaff02-bbf6-47d2-9655-443ca8021c5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 908.644363] env[65503]: DEBUG nova.policy [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b617c5c7508403f9bef0d6b436f541d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be67f50c5bc447309d4c04f3f2805455', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) 
authorize /opt/stack/nova/nova/policy.py:192}} [ 908.648362] env[65503]: DEBUG nova.compute.manager [req-19fc989d-4663-450b-9626-b196336fdcb2 req-bf16ea43-1354-40a8-ba91-5e3438abbb6a service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Received event network-vif-deleted-a95339cb-0433-44f3-992a-1680008ef082 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 908.648559] env[65503]: INFO nova.compute.manager [req-19fc989d-4663-450b-9626-b196336fdcb2 req-bf16ea43-1354-40a8-ba91-5e3438abbb6a service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Neutron deleted interface a95339cb-0433-44f3-992a-1680008ef082; detaching it from the instance and deleting it from the info cache [ 908.648720] env[65503]: DEBUG nova.network.neutron [req-19fc989d-4663-450b-9626-b196336fdcb2 req-bf16ea43-1354-40a8-ba91-5e3438abbb6a service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 908.708774] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450251, 'name': CreateVM_Task, 'duration_secs': 0.75572} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.711523] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 908.712395] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.712520] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.712952] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 908.713211] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e58f4248-22c1-42e7-bba5-4bd7ba29a930 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.720585] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 908.720585] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521c9f24-0fba-d84c-7052-4147e75d3819" [ 908.720585] env[65503]: _type = "Task" [ 908.720585] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.735421] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521c9f24-0fba-d84c-7052-4147e75d3819, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.796228] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450254, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.873683] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.937610] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.937847] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.938270] env[65503]: INFO nova.compute.manager [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Shelving [ 908.964881] env[65503]: DEBUG nova.network.neutron [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Successfully created port: a52973b6-14a8-480e-8ad4-92719252801c {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 908.980085] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1cd7a4-513b-459c-94a1-1bd93484c736 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.990348] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc75d154-28ef-4f24-bf7d-fce107158b6b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.025612] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powering on the VM {{(pid=65503) power_on_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.027226] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff4cc05b-96ed-46ba-833f-8dd2d1dc633f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.029203] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6666f0d-85a6-41b8-8d96-af37fb5d2dd8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.040774] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d591c5-9b85-4039-b96a-0ac316cf46bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.046471] env[65503]: DEBUG oslo_vmware.api [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 909.046471] env[65503]: value = "task-4450255" [ 909.046471] env[65503]: _type = "Task" [ 909.046471] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.057336] env[65503]: DEBUG nova.compute.provider_tree [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 909.068857] env[65503]: DEBUG oslo_vmware.api [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450255, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.072759] env[65503]: DEBUG nova.compute.manager [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 909.076806] env[65503]: DEBUG nova.network.neutron [-] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 909.107193] env[65503]: DEBUG oslo_concurrency.lockutils [req-a3ec1b02-68fd-4e3e-8559-4a8f77d13ca6 req-251c40a7-46d7-43a8-aedf-76c2c9601881 service nova] Releasing lock "refresh_cache-909b3535-9410-4820-a34d-6c0e9627f506" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.152420] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0be7643-a045-4496-880c-1c772236609b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.166399] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686360b4-c1c2-4778-9fa6-0f9499266c96 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.205038] env[65503]: DEBUG nova.compute.manager [req-19fc989d-4663-450b-9626-b196336fdcb2 req-bf16ea43-1354-40a8-ba91-5e3438abbb6a service nova] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Detach interface failed, port_id=a95339cb-0433-44f3-992a-1680008ef082, reason: Instance bc0c0066-b672-4385-8d68-c14e3635af4e could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 909.234946] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521c9f24-0fba-d84c-7052-4147e75d3819, 'name': SearchDatastore_Task, 'duration_secs': 0.031317} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.235931] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.235931] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 909.235931] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.235931] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.236195] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 909.236571] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e688efd-c0e4-43dc-894f-459c4fa39ac2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.248089] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 909.248301] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 909.249115] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96a03e9f-fea9-4d71-863c-8c1cdaa23990 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.256311] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 909.256311] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e73140-145a-9661-4867-411c6267a00d" [ 909.256311] env[65503]: _type = "Task" [ 909.256311] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.266479] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e73140-145a-9661-4867-411c6267a00d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.294559] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450254, 'name': ReconfigVM_Task, 'duration_secs': 0.571387} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.294855] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 236e651f-6a27-4601-8a76-ca1619e32dc6/236e651f-6a27-4601-8a76-ca1619e32dc6.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.295584] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15527430-4b83-4f7d-9fa5-a7c00234b156 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.304564] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 909.304564] env[65503]: value = "task-4450256" [ 909.304564] env[65503]: _type = "Task" [ 909.304564] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.317938] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450256, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.369382] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.556696] env[65503]: DEBUG oslo_vmware.api [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450255, 'name': PowerOnVM_Task, 'duration_secs': 0.487126} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.556696] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 909.556696] env[65503]: DEBUG nova.compute.manager [None req-130dcc5a-be7d-4b66-817e-210c1ef4252c tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 909.557102] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9202f5a4-61ca-482c-9f6a-9fce0c6f02e2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.561824] env[65503]: DEBUG nova.scheduler.client.report [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 909.581244] env[65503]: INFO nova.compute.manager [-] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Took 1.31 seconds to deallocate network for instance. [ 909.771829] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e73140-145a-9661-4867-411c6267a00d, 'name': SearchDatastore_Task, 'duration_secs': 0.012129} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.772705] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d03aac63-f9c5-4a7f-8244-e4d4ed657a99 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.778795] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 909.778795] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526fad44-c444-32d3-6bab-f7e1390e4930" [ 909.778795] env[65503]: _type = "Task" [ 909.778795] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.788064] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526fad44-c444-32d3-6bab-f7e1390e4930, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.814210] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450256, 'name': Rename_Task, 'duration_secs': 0.179884} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.814500] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.814757] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8fb1e28b-158a-4955-bd64-2e27b7314dd2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.822504] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 909.822504] env[65503]: value = "task-4450257" [ 909.822504] env[65503]: _type = "Task" [ 909.822504] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.832596] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450257, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.871907] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.952043] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.952043] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4393c929-1dd1-4ca7-8348-95b112693f5b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.959754] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 909.959754] env[65503]: value = "task-4450258" [ 909.959754] env[65503]: _type = "Task" [ 909.959754] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.969504] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450258, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.070726] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.007s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.073789] env[65503]: DEBUG oslo_concurrency.lockutils [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.750s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.073899] env[65503]: DEBUG nova.objects.instance [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 910.084078] env[65503]: DEBUG nova.compute.manager [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 910.088433] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.110888] env[65503]: INFO nova.scheduler.client.report [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Deleted allocations for instance 38e9a714-87f8-422c-9cc5-09b6aec76198 [ 910.130077] env[65503]: DEBUG nova.virt.hardware [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 910.130077] env[65503]: DEBUG nova.virt.hardware [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 910.130077] env[65503]: DEBUG nova.virt.hardware [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 910.130077] env[65503]: DEBUG nova.virt.hardware [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 910.130077] env[65503]: DEBUG nova.virt.hardware [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 910.130077] env[65503]: DEBUG nova.virt.hardware [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 910.130077] env[65503]: DEBUG nova.virt.hardware [None 
req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 910.130077] env[65503]: DEBUG nova.virt.hardware [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 910.130077] env[65503]: DEBUG nova.virt.hardware [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 910.130077] env[65503]: DEBUG nova.virt.hardware [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 910.130077] env[65503]: DEBUG nova.virt.hardware [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 910.131088] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a963960-07be-469d-ab23-c20ed16a7f09 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.141767] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e162465-8a44-41e3-9b09-8e658c2c7923 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.290937] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526fad44-c444-32d3-6bab-f7e1390e4930, 'name': SearchDatastore_Task, 'duration_secs': 0.009715} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.291270] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.291547] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d/0ece7ad4-40cd-43e4-8cbc-dddd43f0645d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 910.291823] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e416ab2-288a-4bd9-a128-2b38b7c50bab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.300265] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 910.300265] env[65503]: value = "task-4450259" [ 910.300265] env[65503]: _type = "Task" [ 910.300265] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.310629] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450259, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.335211] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450257, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.373505] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.471907] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450258, 'name': PowerOffVM_Task, 'duration_secs': 0.292126} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.473383] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.473480] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90545dde-3025-44a8-a822-7bebeeb4b90f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.497203] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e53e82-f799-4ac4-ac88-e6a913e9a7c1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.502600] env[65503]: DEBUG nova.compute.manager [req-49e1dde1-3864-4f2f-903c-1a781408b66c req-c49ccc21-efa8-43b2-93ba-24bc6328da03 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received event network-vif-plugged-a52973b6-14a8-480e-8ad4-92719252801c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 910.502819] env[65503]: DEBUG oslo_concurrency.lockutils [req-49e1dde1-3864-4f2f-903c-1a781408b66c req-c49ccc21-efa8-43b2-93ba-24bc6328da03 service nova] Acquiring lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.503079] env[65503]: DEBUG oslo_concurrency.lockutils [req-49e1dde1-3864-4f2f-903c-1a781408b66c req-c49ccc21-efa8-43b2-93ba-24bc6328da03 service nova] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.503250] env[65503]: DEBUG oslo_concurrency.lockutils [req-49e1dde1-3864-4f2f-903c-1a781408b66c req-c49ccc21-efa8-43b2-93ba-24bc6328da03 service nova] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.503415] env[65503]: DEBUG nova.compute.manager [req-49e1dde1-3864-4f2f-903c-1a781408b66c req-c49ccc21-efa8-43b2-93ba-24bc6328da03 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] No waiting events found dispatching network-vif-plugged-a52973b6-14a8-480e-8ad4-92719252801c {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 910.503698] env[65503]: WARNING nova.compute.manager [req-49e1dde1-3864-4f2f-903c-1a781408b66c req-c49ccc21-efa8-43b2-93ba-24bc6328da03 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received unexpected event network-vif-plugged-a52973b6-14a8-480e-8ad4-92719252801c for instance with vm_state building and task_state spawning. 
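
The three-line lockutils traces in the records above ('Acquiring lock ... by ...', 'acquired ... :: waited 0.000s', '"released" ... :: held 0.000s') are emitted by oslo.concurrency every time Nova takes a named fair lock, such as the per-instance "-events" lock used around InstanceEvents._pop_event. The following is a minimal sketch of the pattern that produces this logging, using only the public oslo_concurrency.lockutils API; the lock name and function bodies are illustrative placeholders, not Nova's actual code, and the exact wording and line numbers of the DEBUG output depend on the oslo.concurrency release.

    from oslo_concurrency import lockutils


    # Decorator form: the wrapping code in lockutils logs the
    # 'Acquiring lock "<name>" by "<function>"' / 'acquired ... waited' /
    # '"released" ... held' DEBUG triple seen in the records above.
    @lockutils.synchronized('example-instance-uuid-events')
    def pop_instance_event_example():
        # Body runs while the named lock is held.
        return None


    # Context-manager form: produces the shorter "Acquiring lock" /
    # "Acquired lock" / "Releasing lock" lines also visible above
    # (e.g. around the refresh_cache and datastore image-cache locks).
    def clear_events_example():
        with lockutils.lock('example-instance-uuid-events'):
            pass


    if __name__ == '__main__':
        pop_instance_event_example()
        clear_events_example()
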
[ 910.627119] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21dd1f79-df76-41fc-9b18-ba414cb927e4 tempest-ServersTestBootFromVolume-329786802 tempest-ServersTestBootFromVolume-329786802-project-member] Lock "38e9a714-87f8-422c-9cc5-09b6aec76198" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.355s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.630310] env[65503]: DEBUG nova.network.neutron [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Successfully updated port: a52973b6-14a8-480e-8ad4-92719252801c {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 910.812199] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450259, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498169} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.812560] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d/0ece7ad4-40cd-43e4-8cbc-dddd43f0645d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 910.812784] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 910.813163] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c8dacab-9dd0-4fb4-8321-4c05d3beb504 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.821851] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 910.821851] env[65503]: value = "task-4450260" [ 910.821851] env[65503]: _type = "Task" [ 910.821851] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.835444] env[65503]: DEBUG oslo_vmware.api [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450257, 'name': PowerOnVM_Task, 'duration_secs': 0.766939} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.838685] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.839479] env[65503]: INFO nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Took 8.41 seconds to spawn the instance on the hypervisor. [ 910.839479] env[65503]: DEBUG nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 910.839479] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450260, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.840317] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0428b64-eb23-4fb7-8824-edbbd4c28353 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.873186] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.014873] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 911.014873] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f0a3ea23-5732-40c3-9fde-53f30fba3e95 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.024786] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 911.024786] env[65503]: value = "task-4450261" [ 911.024786] env[65503]: _type = "Task" [ 911.024786] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.034548] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450261, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.085987] env[65503]: DEBUG oslo_concurrency.lockutils [None req-76a4d701-ae24-4e76-bf3e-41182acb6096 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.087275] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 22.253s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.134284] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.134479] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.134654] env[65503]: DEBUG nova.network.neutron [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 911.332659] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450260, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067362} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.333625] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 911.334070] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c24da98-de2e-4a3c-b60b-840a115a9d2f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.357540] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d/0ece7ad4-40cd-43e4-8cbc-dddd43f0645d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 911.362711] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4433ed5-7e63-4125-9971-9a60f9a034a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.385257] env[65503]: INFO nova.compute.manager [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Took 44.52 seconds to build instance. [ 911.393524] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.395878] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 911.395878] env[65503]: value = "task-4450262" [ 911.395878] env[65503]: _type = "Task" [ 911.395878] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.406622] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450262, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.546548] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450261, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.592303] env[65503]: INFO nova.compute.claims [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.637913] env[65503]: WARNING openstack [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 911.638523] env[65503]: WARNING openstack [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 911.696323] env[65503]: DEBUG nova.network.neutron [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 911.717313] env[65503]: WARNING openstack [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 911.717497] env[65503]: WARNING openstack [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 911.788479] env[65503]: WARNING neutronclient.v2_0.client [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
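
Most of the DEBUG traffic in this section follows the same oslo.vmware call-and-poll cycle: the driver invokes a vSphere *_Task method (PowerOnVM_Task, CopyVirtualDisk_Task, CreateSnapshot_Task, ReconfigVM_Task, ...), then wait_for_task polls it, logging "progress is N%" until "completed successfully" with a duration. Below is a hedged sketch of that cycle against the public oslo_vmware API; the vCenter host, credentials, and managed-object reference are placeholders rather than values from this log, and Nova's real code goes through its own vm_util helpers instead of calling the session directly.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util


    def power_on_vm(session, vm_ref):
        # Invoke the vSphere PowerOnVM_Task method on the VM managed object,
        # then block while wait_for_task polls it; the "progress is N%" and
        # "completed successfully" lines above come from this polling loop.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task_ref)


    if __name__ == '__main__':
        # Placeholder connection details; api_retry_count and
        # task_poll_interval mirror the knobs Nova exposes for this driver.
        session = vmware_api.VMwareAPISession(
            'vcenter.example.org', 'administrator', 'secret',
            api_retry_count=10, task_poll_interval=0.5)
        # Illustrative moref only: in Nova the VM reference is normally
        # obtained from a property-collector search, not hard-coded.
        vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')
        power_on_vm(session, vm_ref)
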
[ 911.789441] env[65503]: WARNING openstack [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 911.789917] env[65503]: WARNING openstack [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 911.893150] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ad99895-8c4c-4935-a16c-37a827051cfe tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "236e651f-6a27-4601-8a76-ca1619e32dc6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.046s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.893826] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.905666] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450262, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.913230] env[65503]: DEBUG nova.network.neutron [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updating instance_info_cache with network_info: [{"id": "a52973b6-14a8-480e-8ad4-92719252801c", "address": "fa:16:3e:73:61:b2", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa52973b6-14", "ovs_interfaceid": "a52973b6-14a8-480e-8ad4-92719252801c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 912.037108] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450261, 'name': CreateSnapshot_Task, 'duration_secs': 0.938421} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.037742] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 912.038386] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6dfae0-20e5-4ea6-892e-b1eaba695322 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.099459] env[65503]: INFO nova.compute.resource_tracker [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating resource usage from migration b9d122d6-a2f3-4147-a585-2287e9d12e22 [ 912.216162] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.217324] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.217324] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.217324] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.217324] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.222266] env[65503]: INFO nova.compute.manager [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 
tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Terminating instance [ 912.309806] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "236e651f-6a27-4601-8a76-ca1619e32dc6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.309806] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "236e651f-6a27-4601-8a76-ca1619e32dc6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.310033] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "236e651f-6a27-4601-8a76-ca1619e32dc6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.310338] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "236e651f-6a27-4601-8a76-ca1619e32dc6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.310460] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "236e651f-6a27-4601-8a76-ca1619e32dc6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.314842] env[65503]: INFO nova.compute.manager [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Terminating instance [ 912.390592] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.407224] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450262, 'name': ReconfigVM_Task, 'duration_secs': 0.750864} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.407224] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d/0ece7ad4-40cd-43e4-8cbc-dddd43f0645d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 912.407487] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07f98339-ebf0-478d-a23f-c7da6125c896 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.414683] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.415042] env[65503]: DEBUG nova.compute.manager [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Instance network_info: |[{"id": "a52973b6-14a8-480e-8ad4-92719252801c", "address": "fa:16:3e:73:61:b2", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa52973b6-14", "ovs_interfaceid": "a52973b6-14a8-480e-8ad4-92719252801c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 912.415506] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:61:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a52973b6-14a8-480e-8ad4-92719252801c', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 912.425251] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall 
[None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 912.429420] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 912.429769] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 912.429769] env[65503]: value = "task-4450263" [ 912.429769] env[65503]: _type = "Task" [ 912.429769] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.430227] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4c47248-644e-4a7e-9087-3542d483c6d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.463736] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450263, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.467863] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 912.467863] env[65503]: value = "task-4450264" [ 912.467863] env[65503]: _type = "Task" [ 912.467863] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.479670] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450264, 'name': CreateVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.543815] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b038f8-efd3-49bc-9f03-6845625fd127 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.558922] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 912.560460] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-dad61d2a-809c-4c00-8669-9a5a561a9463 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.564585] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c087192-3de6-4bbf-bc31-de55ab144b8e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.568775] env[65503]: DEBUG nova.compute.manager [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received event network-changed-a52973b6-14a8-480e-8ad4-92719252801c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 912.568964] env[65503]: DEBUG nova.compute.manager [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Refreshing instance network info cache due to event network-changed-a52973b6-14a8-480e-8ad4-92719252801c. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 912.569196] env[65503]: DEBUG oslo_concurrency.lockutils [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] Acquiring lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.569328] env[65503]: DEBUG oslo_concurrency.lockutils [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] Acquired lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.569482] env[65503]: DEBUG nova.network.neutron [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Refreshing network info cache for port a52973b6-14a8-480e-8ad4-92719252801c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 912.603049] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34995a48-c5e1-4fda-98a4-f7f701ebdf43 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.606016] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 912.606016] env[65503]: value = "task-4450265" [ 912.606016] env[65503]: _type = "Task" [ 912.606016] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.616704] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b71281-04f2-4fee-8805-b7bb9922f98d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.625499] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450265, 'name': CloneVM_Task} progress is 12%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.638933] env[65503]: DEBUG nova.compute.provider_tree [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.728583] env[65503]: DEBUG nova.compute.manager [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 912.728877] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.730206] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06faa1e9-31b7-4a30-9724-832795d7defa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.741191] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.741592] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d595692-b636-4977-9a4d-3fc0c4b2d851 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.749976] env[65503]: DEBUG oslo_vmware.api [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 912.749976] env[65503]: value = "task-4450266" [ 912.749976] env[65503]: _type = "Task" [ 912.749976] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.762534] env[65503]: DEBUG oslo_vmware.api [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450266, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.822445] env[65503]: DEBUG nova.compute.manager [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 912.823617] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.824101] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f42d38-1ecf-49ca-83a3-e16ea4580dbe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.834562] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.834934] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ab04e74-e853-41a2-987e-f6a549dab3dd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.844607] env[65503]: DEBUG oslo_vmware.api [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 912.844607] env[65503]: value = "task-4450267" [ 912.844607] env[65503]: _type = "Task" [ 912.844607] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.857586] env[65503]: DEBUG oslo_vmware.api [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450267, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.898762] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450247, 'name': CreateVM_Task, 'duration_secs': 5.148065} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.899081] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 912.899836] env[65503]: WARNING neutronclient.v2_0.client [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 912.900368] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.900553] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.900979] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 912.901376] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a20bef3-17be-4692-88ca-1cf02eec6cba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.908311] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 912.908311] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a4dcc1-b18d-8ce8-f0e9-b439d8f3a105" [ 912.908311] env[65503]: _type = "Task" [ 912.908311] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.921249] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a4dcc1-b18d-8ce8-f0e9-b439d8f3a105, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.960584] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450263, 'name': Rename_Task, 'duration_secs': 0.238705} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.961092] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 912.961543] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-238d3857-d778-4ffc-81d8-b9ff58e0de49 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.977737] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 912.977737] env[65503]: value = "task-4450268" [ 912.977737] env[65503]: _type = "Task" [ 912.977737] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.992824] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450264, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.002507] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450268, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.073039] env[65503]: WARNING neutronclient.v2_0.client [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 913.073529] env[65503]: WARNING openstack [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 913.073934] env[65503]: WARNING openstack [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 913.118629] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450265, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.141991] env[65503]: DEBUG nova.scheduler.client.report [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.262909] env[65503]: DEBUG oslo_vmware.api [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450266, 'name': PowerOffVM_Task, 'duration_secs': 0.276176} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.263078] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.263246] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.263576] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bc73420-082b-4ca9-b247-52ac28058333 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.288324] env[65503]: WARNING openstack [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 913.288819] env[65503]: WARNING openstack [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 913.363026] env[65503]: DEBUG oslo_vmware.api [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450267, 'name': PowerOffVM_Task, 'duration_secs': 0.220359} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.363652] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.363769] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.364147] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c691204-a245-415b-a855-54e5b99c36ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.398530] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 913.398530] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 913.398530] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleting the datastore file [datastore2] d7dd714a-a738-4d68-bbf0-32daf4a1c49b {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.398924] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-602d26de-1fce-49be-9423-2786858b8969 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.406640] env[65503]: DEBUG oslo_vmware.api [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 913.406640] env[65503]: value = "task-4450271" [ 913.406640] env[65503]: _type = "Task" [ 913.406640] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.414676] env[65503]: WARNING neutronclient.v2_0.client [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 913.415389] env[65503]: WARNING openstack [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 913.415739] env[65503]: WARNING openstack [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 913.427367] env[65503]: DEBUG oslo_vmware.api [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450271, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.433382] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a4dcc1-b18d-8ce8-f0e9-b439d8f3a105, 'name': SearchDatastore_Task, 'duration_secs': 0.013529} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.433701] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.433967] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 913.434238] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.434396] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.434585] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 
tempest-AttachVolumeNegativeTest-462648042-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 913.434879] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-273c0395-4e72-4d5c-b2bf-7a9a371612f8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.445510] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 913.445717] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 913.446678] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-571c74c2-72d7-4089-99ef-e302e760ffdf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.453531] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 913.453531] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525c8584-9953-908e-b42d-07bfbf7f22e6" [ 913.453531] env[65503]: _type = "Task" [ 913.453531] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.463584] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525c8584-9953-908e-b42d-07bfbf7f22e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.478463] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 913.478679] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 913.478854] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleting the datastore file [datastore1] 236e651f-6a27-4601-8a76-ca1619e32dc6 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.482724] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f0de495-4894-4da0-85f7-04a46493b7b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.484833] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450264, 'name': CreateVM_Task, 'duration_secs': 0.716297} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.487905] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 913.488828] env[65503]: WARNING neutronclient.v2_0.client [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 913.489270] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.489384] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.489665] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 913.490617] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ed6a587-36ec-4fb0-a156-b793d544d216 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.499793] env[65503]: DEBUG oslo_vmware.api [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for the task: (returnval){ [ 913.499793] env[65503]: value = "task-4450272" [ 913.499793] env[65503]: _type = "Task" [ 913.499793] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.500047] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450268, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.502071] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 913.502071] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5229cd8d-9285-0777-6de9-19c405673ead" [ 913.502071] env[65503]: _type = "Task" [ 913.502071] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.520511] env[65503]: DEBUG oslo_vmware.api [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450272, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.524214] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5229cd8d-9285-0777-6de9-19c405673ead, 'name': SearchDatastore_Task, 'duration_secs': 0.012699} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.524496] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.524717] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 913.524919] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.557456] env[65503]: DEBUG nova.network.neutron [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updated VIF entry in instance network info cache for port a52973b6-14a8-480e-8ad4-92719252801c. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 913.557876] env[65503]: DEBUG nova.network.neutron [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updating instance_info_cache with network_info: [{"id": "a52973b6-14a8-480e-8ad4-92719252801c", "address": "fa:16:3e:73:61:b2", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa52973b6-14", "ovs_interfaceid": "a52973b6-14a8-480e-8ad4-92719252801c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 913.620536] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450265, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.649704] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.562s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.650417] env[65503]: INFO nova.compute.manager [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Migrating [ 913.661248] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.433s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.661925] env[65503]: DEBUG nova.objects.instance [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lazy-loading 'resources' on Instance uuid 0001f4db-3073-411c-8d60-6d8528ef263a {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 913.917901] env[65503]: DEBUG oslo_vmware.api [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450271, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219387} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.918219] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 913.918405] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 913.918582] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 913.918752] env[65503]: INFO nova.compute.manager [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 913.919469] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 913.919469] env[65503]: DEBUG nova.compute.manager [-] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 913.919469] env[65503]: DEBUG nova.network.neutron [-] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 913.919679] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 913.920787] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 913.920787] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 913.965068] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525c8584-9953-908e-b42d-07bfbf7f22e6, 'name': SearchDatastore_Task, 'duration_secs': 0.012525} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.965733] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 913.968964] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7e1f556-5f45-439f-9c52-07cac8be3871 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.974661] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 913.974661] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529ec6d1-d26b-0d87-ac50-2e68c7c00321" [ 913.974661] env[65503]: _type = "Task" [ 913.974661] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.984563] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529ec6d1-d26b-0d87-ac50-2e68c7c00321, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.993863] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450268, 'name': PowerOnVM_Task} progress is 87%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.013550] env[65503]: DEBUG oslo_vmware.api [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Task: {'id': task-4450272, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210683} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.013815] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.014040] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 914.014263] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 914.014436] env[65503]: INFO nova.compute.manager [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Took 1.19 seconds to destroy the instance on the hypervisor. [ 914.014677] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 914.014872] env[65503]: DEBUG nova.compute.manager [-] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 914.014967] env[65503]: DEBUG nova.network.neutron [-] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 914.015258] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 914.015772] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 914.016035] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 914.056054] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 914.060783] env[65503]: DEBUG oslo_concurrency.lockutils [req-65ae561b-8d11-47c3-9721-7ad9d3d74e63 req-c4ab618f-f842-4cf0-943b-6b51048912c0 service nova] Releasing lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.123767] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450265, 'name': CloneVM_Task, 'duration_secs': 1.382217} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.123767] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Created linked-clone VM from snapshot [ 914.124326] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a334572-e3ef-4884-ae2a-60abb2201368 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.133261] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Uploading image b21b686c-8154-4b2b-9ac4-03e9bcfc50c2 {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 914.159511] env[65503]: DEBUG oslo_vmware.rw_handles [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 914.159511] env[65503]: value = "vm-870405" [ 914.159511] env[65503]: _type = "VirtualMachine" [ 914.159511] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 914.159971] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0560bd13-019e-4bcd-9ea3-d5af6a91ee11 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.168460] env[65503]: DEBUG oslo_vmware.rw_handles [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lease: (returnval){ [ 914.168460] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52dae81b-077c-707a-4153-bfd88046b504" [ 914.168460] env[65503]: _type = "HttpNfcLease" [ 914.168460] env[65503]: } obtained for exporting VM: (result){ [ 914.168460] env[65503]: value = "vm-870405" [ 914.168460] env[65503]: _type = "VirtualMachine" [ 914.168460] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 914.168460] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the lease: (returnval){ [ 914.168460] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52dae81b-077c-707a-4153-bfd88046b504" [ 914.168460] env[65503]: _type = "HttpNfcLease" [ 914.168460] env[65503]: } to be ready. 
{{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 914.174954] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.174954] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.175244] env[65503]: DEBUG nova.network.neutron [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 914.180578] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 914.180578] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52dae81b-077c-707a-4153-bfd88046b504" [ 914.180578] env[65503]: _type = "HttpNfcLease" [ 914.180578] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 914.182709] env[65503]: DEBUG oslo_vmware.rw_handles [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 914.182709] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52dae81b-077c-707a-4153-bfd88046b504" [ 914.182709] env[65503]: _type = "HttpNfcLease" [ 914.182709] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 914.183796] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737d8264-34ea-47e8-b688-07a28b739c81 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.194599] env[65503]: DEBUG oslo_vmware.rw_handles [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523213cc-2be2-eb4c-c3c4-f59686993cf0/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 914.194802] env[65503]: DEBUG oslo_vmware.rw_handles [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523213cc-2be2-eb4c-c3c4-f59686993cf0/disk-0.vmdk for reading. 
{{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 914.315284] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-55940017-1f0d-41e8-a3fb-ae39343679a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.382067] env[65503]: DEBUG nova.compute.manager [req-6009829e-46ff-4ae9-92a7-6fbb7a1b44ca req-f03e28b4-42fa-42d9-91e0-ee8f98c90276 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Received event network-vif-deleted-76dfa38d-18b0-4fa3-a4ac-7988e750f50a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 914.382868] env[65503]: INFO nova.compute.manager [req-6009829e-46ff-4ae9-92a7-6fbb7a1b44ca req-f03e28b4-42fa-42d9-91e0-ee8f98c90276 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Neutron deleted interface 76dfa38d-18b0-4fa3-a4ac-7988e750f50a; detaching it from the instance and deleting it from the info cache [ 914.382868] env[65503]: DEBUG nova.network.neutron [req-6009829e-46ff-4ae9-92a7-6fbb7a1b44ca req-f03e28b4-42fa-42d9-91e0-ee8f98c90276 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 914.486415] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529ec6d1-d26b-0d87-ac50-2e68c7c00321, 'name': SearchDatastore_Task, 'duration_secs': 0.010413} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.489097] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.489370] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 909b3535-9410-4820-a34d-6c0e9627f506/909b3535-9410-4820-a34d-6c0e9627f506.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 914.490016] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.490016] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.490146] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-266f21f4-6214-43d1-a17d-fac890bfd392 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.492678] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9689c7b3-e6af-4c92-b141-f49f53dbc3db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.498456] env[65503]: DEBUG oslo_vmware.api [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450268, 'name': PowerOnVM_Task, 'duration_secs': 1.327866} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.502436] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 914.503242] env[65503]: INFO nova.compute.manager [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Took 6.94 seconds to spawn the instance on the hypervisor. 
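The records above all follow one pattern: a vSphere method that returns a Task managed object is invoked (CloneVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, PowerOnVM_Task), and oslo.vmware then polls that task until it logs "completed successfully" together with a duration_secs value. A minimal sketch of that pattern, assuming only the documented oslo.vmware interface (VMwareAPISession.invoke_api / wait_for_task); the host name and credentials below are placeholders, not values taken from this log:

    from oslo_vmware import api as vmware_api

    def make_session(host, user, password):
        # All three arguments are placeholders; the compute driver builds an
        # equivalent session from its own configuration.
        return vmware_api.VMwareAPISession(
            host, user, password, api_retry_count=3, task_poll_interval=0.5)

    def power_on_and_wait(session, vm_ref):
        # Invoke a method that returns a Task managed object, then let
        # oslo.vmware poll it -- the source of the "Invoking ...",
        # "progress is N%" and "completed successfully" records above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)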
[ 914.503532] env[65503]: DEBUG nova.compute.manager [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 914.503925] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 914.503925] env[65503]: value = "task-4450274" [ 914.503925] env[65503]: _type = "Task" [ 914.503925] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.505818] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f0f96e-66ae-4c99-a6a3-e2a2fda37cf3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.508387] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.508553] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.512170] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c0bf5b5-0c52-466d-87d3-2a3ada817a14 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.525658] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 914.525658] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]522c614d-ef77-a5b2-01ab-76740e175245" [ 914.525658] env[65503]: _type = "Task" [ 914.525658] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.525658] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450274, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.546728] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522c614d-ef77-a5b2-01ab-76740e175245, 'name': SearchDatastore_Task, 'duration_secs': 0.018281} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.553010] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8a26338-605a-4072-9443-a6d9b42b053d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.560527] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 914.560527] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f6fee4-7193-d077-6b99-247fba04ce2a" [ 914.560527] env[65503]: _type = "Task" [ 914.560527] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.574528] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f6fee4-7193-d077-6b99-247fba04ce2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.636063] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503eca35-0b4f-45c4-ac8f-c0b272fad84e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.645430] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafd3832-2729-4b70-b73d-f329718926b7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.654468] env[65503]: DEBUG nova.compute.manager [req-7b1edc65-1603-47a9-95d4-08d78dc28e02 req-48b28d59-c8db-4c29-b3f5-4b668ce6f35b service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Received event network-vif-deleted-ba7e7de1-3fbb-49ea-889a-72f446d61a64 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 914.654845] env[65503]: INFO nova.compute.manager [req-7b1edc65-1603-47a9-95d4-08d78dc28e02 req-48b28d59-c8db-4c29-b3f5-4b668ce6f35b service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Neutron deleted interface ba7e7de1-3fbb-49ea-889a-72f446d61a64; detaching it from the instance and deleting it from the info cache [ 914.654927] env[65503]: DEBUG nova.network.neutron [req-7b1edc65-1603-47a9-95d4-08d78dc28e02 req-48b28d59-c8db-4c29-b3f5-4b668ce6f35b service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 914.685029] env[65503]: WARNING neutronclient.v2_0.client [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
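The "Acquiring lock" / "Acquired lock" / "Releasing lock" DEBUG records around refresh_cache-<uuid> come from oslo.concurrency's lockutils helpers. As a hedged illustration only (the function bodies and names below are placeholders, not Nova code), the context-manager and decorator forms look like this:

    from oslo_concurrency import lockutils

    def refresh_cache(instance_uuid):
        # Entering and leaving this block emits the same kind of
        # "Acquiring/Acquired/Releasing lock" DEBUG lines seen above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance's network info cache here

    # Coarser named locks such as "compute_resources", which appear later in
    # this log, are typically taken with the decorator form:
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass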
[ 914.686153] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 914.686529] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 914.696840] env[65503]: DEBUG nova.network.neutron [-] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 914.698481] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36d11eb-6d88-46bd-980d-06da83d8f7ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.708702] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3c5387-eab1-47e9-98dd-1ca1c8c1f7bc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.726508] env[65503]: DEBUG nova.compute.provider_tree [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.798527] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 914.799038] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 914.836932] env[65503]: DEBUG nova.network.neutron [-] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 914.872471] env[65503]: WARNING neutronclient.v2_0.client [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient 
is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 914.873368] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 914.873723] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 914.888168] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ac57b3ef-1714-4554-924d-c0eb148e22c2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.901987] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a2dc33-bc85-4e50-9ef6-c61cdac61293 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.953519] env[65503]: DEBUG nova.compute.manager [req-6009829e-46ff-4ae9-92a7-6fbb7a1b44ca req-f03e28b4-42fa-42d9-91e0-ee8f98c90276 service nova] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Detach interface failed, port_id=76dfa38d-18b0-4fa3-a4ac-7988e750f50a, reason: Instance 236e651f-6a27-4601-8a76-ca1619e32dc6 could not be found. 
{{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 914.992514] env[65503]: DEBUG nova.network.neutron [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance_info_cache with network_info: [{"id": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "address": "fa:16:3e:9b:39:b6", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19156670-d2", "ovs_interfaceid": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 915.021454] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450274, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.042164] env[65503]: INFO nova.compute.manager [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Took 39.17 seconds to build instance. [ 915.078019] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f6fee4-7193-d077-6b99-247fba04ce2a, 'name': SearchDatastore_Task, 'duration_secs': 0.017743} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.078792] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.079141] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 2a1587cd-8b47-439f-948c-d58a5dc8220e/2a1587cd-8b47-439f-948c-d58a5dc8220e.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 915.079586] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2129b45b-deff-438c-b327-1a7343ce1b04 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.089862] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 915.089862] env[65503]: value = "task-4450275" [ 915.089862] env[65503]: _type = "Task" [ 915.089862] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.102755] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.196085] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-03f4817e-e02e-49bb-8ea8-5480f279b080 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.202786] env[65503]: INFO nova.compute.manager [-] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Took 1.28 seconds to deallocate network for instance. 
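Many of the records above carry timing data in the form Task: {'id': ..., 'name': ..., 'duration_secs': ...} completed successfully. A small, purely illustrative helper (not part of any OpenStack tooling) for pulling those figures out of raw log lines like these:

    import re

    # Matches fragments such as:
    #   'name': SearchDatastore_Task, 'duration_secs': 0.017743} completed successfully
    _TASK_RE = re.compile(
        r"'name': (?P<name>\w+), 'duration_secs': (?P<secs>[0-9.]+)\}"
        r" completed successfully")

    def task_durations(lines):
        # Yield (task_name, seconds) pairs from raw nova-compute log lines.
        for line in lines:
            for match in _TASK_RE.finditer(line):
                yield match.group('name'), float(match.group('secs'))

Fed the SearchDatastore_Task record a few lines above, for example, this yields ('SearchDatastore_Task', 0.017743).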
[ 915.214789] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b0a8c7-387f-490d-a07e-790f9ab6c55a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.232832] env[65503]: DEBUG nova.scheduler.client.report [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 915.266785] env[65503]: DEBUG nova.compute.manager [req-7b1edc65-1603-47a9-95d4-08d78dc28e02 req-48b28d59-c8db-4c29-b3f5-4b668ce6f35b service nova] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Detach interface failed, port_id=ba7e7de1-3fbb-49ea-889a-72f446d61a64, reason: Instance d7dd714a-a738-4d68-bbf0-32daf4a1c49b could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 915.339205] env[65503]: INFO nova.compute.manager [-] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Took 1.32 seconds to deallocate network for instance. [ 915.496163] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.524404] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450274, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742362} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.524806] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 909b3535-9410-4820-a34d-6c0e9627f506/909b3535-9410-4820-a34d-6c0e9627f506.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 915.525227] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 915.525560] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9426d485-8558-49fd-8e17-38ba3d93ff03 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.535796] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 915.535796] env[65503]: value = "task-4450276" [ 915.535796] env[65503]: _type = "Task" [ 915.535796] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.546559] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9c5ec383-8275-4e88-800a-3b2ac7d8207e tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "0ece7ad4-40cd-43e4-8cbc-dddd43f0645d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.386s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.547054] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450276, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.601379] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450275, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.731198] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.743413] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.082s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.746538] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.091s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.746822] env[65503]: DEBUG nova.objects.instance [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lazy-loading 'resources' on Instance uuid 39266117-e82e-48ae-932a-be04b1a7351a {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 915.779747] env[65503]: INFO nova.scheduler.client.report [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Deleted allocations for instance 0001f4db-3073-411c-8d60-6d8528ef263a [ 915.848654] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.047869] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450276, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.216095} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.049073] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 916.050057] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ded5dac-d482-4ee5-999a-0b68cb7f67ae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.077560] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 909b3535-9410-4820-a34d-6c0e9627f506/909b3535-9410-4820-a34d-6c0e9627f506.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.078212] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17d5c131-ceea-48e6-9244-bfe67f9cab0c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.103311] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.104912] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 916.104912] env[65503]: value = "task-4450277" [ 916.104912] env[65503]: _type = "Task" [ 916.104912] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.115416] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450277, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.121440] env[65503]: INFO nova.compute.manager [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Rebuilding instance [ 916.168490] env[65503]: DEBUG nova.compute.manager [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 916.169966] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f901abac-d0aa-4342-ac22-b79916cc3b31 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.295039] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2e3b2f9e-e9e3-481d-9981-3d71aa78da41 tempest-ServersListShow298Test-624083500 tempest-ServersListShow298Test-624083500-project-member] Lock "0001f4db-3073-411c-8d60-6d8528ef263a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.443s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.606041] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450275, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.616039] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450277, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.618040] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b65d85-c912-43f0-b056-dcc547b0731e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.626369] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b86f21a-a901-46c3-9a61-6db5262f0713 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.659575] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b956387b-c633-430b-9503-6654bcf946ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.668974] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd2f650-6e25-44df-9a81-29b8323bb45a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.687500] env[65503]: DEBUG nova.compute.provider_tree [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.014606] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940a5c11-fe42-4232-8243-aaabf71ca582 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.037899] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance '708ed8ab-0ec9-457c-966d-b11c55895981' progress to 0 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 917.107024] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450275, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.814002} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.107024] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 2a1587cd-8b47-439f-948c-d58a5dc8220e/2a1587cd-8b47-439f-948c-d58a5dc8220e.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 917.107024] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 917.110471] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4d7d42a4-afea-419a-a71c-a078222c2543 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.120040] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450277, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.121868] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 917.121868] env[65503]: value = "task-4450278" [ 917.121868] env[65503]: _type = "Task" [ 917.121868] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.133961] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450278, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.189967] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.190183] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e91bbe9-29de-4fd8-9fec-6c6d2e3bdff3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.192853] env[65503]: DEBUG nova.scheduler.client.report [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 917.203560] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 917.203560] env[65503]: value = "task-4450279" [ 917.203560] env[65503]: _type = "Task" [ 917.203560] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.215866] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450279, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.549664] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.550089] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e87f911d-ca4e-4c19-a393-8719d473d16e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.560039] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 917.560039] env[65503]: value = "task-4450280" [ 917.560039] env[65503]: _type = "Task" [ 917.560039] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.571132] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450280, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.618103] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450277, 'name': ReconfigVM_Task, 'duration_secs': 1.171818} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.618559] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 909b3535-9410-4820-a34d-6c0e9627f506/909b3535-9410-4820-a34d-6c0e9627f506.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.619387] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00292eae-3278-44b0-893b-3a9ad27abd71 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.627741] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 917.627741] env[65503]: value = "task-4450281" [ 917.627741] env[65503]: _type = "Task" [ 917.627741] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.635192] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450278, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077508} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.635899] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 917.636930] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d86b056-cedc-4011-97e1-93c6e7cb1fd5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.643492] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450281, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.666317] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 2a1587cd-8b47-439f-948c-d58a5dc8220e/2a1587cd-8b47-439f-948c-d58a5dc8220e.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.666758] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ac38b39-4e39-4c64-a145-fcc546a5214a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.688962] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 917.688962] env[65503]: value = "task-4450282" [ 917.688962] env[65503]: _type = "Task" [ 917.688962] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.700252] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.954s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.702572] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450282, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.703130] env[65503]: DEBUG oslo_concurrency.lockutils [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.460s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.703367] env[65503]: DEBUG nova.objects.instance [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Lazy-loading 'resources' on Instance uuid 429b7542-c288-4a7a-9032-09881938b256 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.714395] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450279, 'name': PowerOffVM_Task, 'duration_secs': 0.204672} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.715350] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.716043] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 917.717151] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ba5076-8415-4a08-a20b-17c6d1ee0251 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.728579] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 917.729766] env[65503]: INFO nova.scheduler.client.report [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Deleted allocations for instance 39266117-e82e-48ae-932a-be04b1a7351a [ 917.730812] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2257e496-49f7-48fa-95a9-95870827258d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.775215] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 917.775465] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 917.775650] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Deleting the datastore file [datastore2] 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.776025] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-724b8495-f002-4f8f-83fd-d90355f7c852 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.784506] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 
tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 917.784506] env[65503]: value = "task-4450284" [ 917.784506] env[65503]: _type = "Task" [ 917.784506] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.795420] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450284, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.071295] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450280, 'name': PowerOffVM_Task, 'duration_secs': 0.287309} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.071735] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.072027] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance '708ed8ab-0ec9-457c-966d-b11c55895981' progress to 17 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 918.139567] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450281, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.201129] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450282, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.240500] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d40334b-0a07-444a-9ee6-a03a463ea1fa tempest-MigrationsAdminTest-890018851 tempest-MigrationsAdminTest-890018851-project-member] Lock "39266117-e82e-48ae-932a-be04b1a7351a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.187s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.299211] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450284, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156299} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.299541] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.299866] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.299866] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.537440] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6f7939-abbd-45b7-af63-01f676a80c94 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.547456] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b13726-0ee7-436c-8183-31e1efbb9621 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.583070] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 918.583386] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.583570] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 918.583758] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.583899] 
env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 918.584061] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 918.584289] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 918.584446] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 918.584606] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 918.584762] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 918.584924] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 918.590494] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e3bf103-23e2-4639-b3c8-075019b91199 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.602401] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe60a6f-6058-4e85-8cef-ea1e80a3b163 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.611479] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43e6c33-069d-4369-97a7-dce2ecd405f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.617127] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 918.617127] env[65503]: value = "task-4450285" [ 918.617127] env[65503]: 
_type = "Task" [ 918.617127] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.629098] env[65503]: DEBUG nova.compute.provider_tree [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.638774] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450285, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.647517] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450281, 'name': Rename_Task, 'duration_secs': 0.787648} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.647880] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.648144] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7267c4b0-822f-48c1-92c2-272f60d03993 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.656348] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 918.656348] env[65503]: value = "task-4450286" [ 918.656348] env[65503]: _type = "Task" [ 918.656348] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.665696] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450286, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.700394] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450282, 'name': ReconfigVM_Task, 'duration_secs': 0.955707} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.701387] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 2a1587cd-8b47-439f-948c-d58a5dc8220e/2a1587cd-8b47-439f-948c-d58a5dc8220e.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.701387] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f802352c-5f9d-418f-87ea-3a3342805dd6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.708965] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 918.708965] env[65503]: value = "task-4450287" [ 918.708965] env[65503]: _type = "Task" [ 918.708965] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.720195] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450287, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.130170] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450285, 'name': ReconfigVM_Task, 'duration_secs': 0.259812} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.130498] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance '708ed8ab-0ec9-457c-966d-b11c55895981' progress to 33 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 919.134973] env[65503]: DEBUG nova.scheduler.client.report [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 919.168648] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450286, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.220851] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450287, 'name': Rename_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.339171] env[65503]: DEBUG nova.virt.hardware [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 919.339429] env[65503]: DEBUG nova.virt.hardware [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 919.339578] env[65503]: DEBUG nova.virt.hardware [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 919.339753] env[65503]: DEBUG nova.virt.hardware [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 919.339938] env[65503]: DEBUG nova.virt.hardware [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 919.340078] env[65503]: DEBUG nova.virt.hardware [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 919.340251] env[65503]: DEBUG nova.virt.hardware [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 919.340401] env[65503]: DEBUG nova.virt.hardware [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 919.340676] env[65503]: 
DEBUG nova.virt.hardware [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 919.340789] env[65503]: DEBUG nova.virt.hardware [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 919.340900] env[65503]: DEBUG nova.virt.hardware [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 919.341833] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3177efc1-8b48-48d4-ace9-87d8630aa467 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.352116] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b02bf4-5910-49f8-b624-2401a67ab935 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.369534] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 919.376360] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 919.376360] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 919.376578] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18360d81-90de-4e56-8af4-a52821ae5242 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.398039] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 919.398039] env[65503]: value = "task-4450288" [ 919.398039] env[65503]: _type = "Task" [ 919.398039] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.406674] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450288, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.641683] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 919.641973] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 919.642157] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 919.642339] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 919.642494] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 919.642640] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 919.642842] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 919.643046] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 919.643262] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 
tempest-ServerDiskConfigTestJSON-898978153-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 919.643469] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 919.643698] env[65503]: DEBUG nova.virt.hardware [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 919.649943] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Reconfiguring VM instance instance-00000043 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 919.650845] env[65503]: DEBUG oslo_concurrency.lockutils [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.948s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.652933] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e744fb40-6c22-43ae-8e06-c2d0304836c0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.666812] env[65503]: DEBUG oslo_concurrency.lockutils [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.215s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.667234] env[65503]: DEBUG nova.objects.instance [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lazy-loading 'resources' on Instance uuid bcd845e2-5a89-4eef-bb76-33d69834bbc1 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.681754] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450286, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.683445] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 919.683445] env[65503]: value = "task-4450289" [ 919.683445] env[65503]: _type = "Task" [ 919.683445] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.693021] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450289, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.694067] env[65503]: INFO nova.scheduler.client.report [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Deleted allocations for instance 429b7542-c288-4a7a-9032-09881938b256 [ 919.720673] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450287, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.907767] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450288, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.186402] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450286, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.205221] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450289, 'name': ReconfigVM_Task, 'duration_secs': 0.323047} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.206134] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Reconfigured VM instance instance-00000043 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 920.210193] env[65503]: DEBUG oslo_concurrency.lockutils [None req-086b6a15-b66a-42fa-bcfc-fd33ea7b1d9a tempest-ServerRescueTestJSONUnderV235-1921510197 tempest-ServerRescueTestJSONUnderV235-1921510197-project-member] Lock "429b7542-c288-4a7a-9032-09881938b256" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.940s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.211975] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800a1208-48c1-4134-a343-33cb765d30a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.229298] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450287, 'name': Rename_Task, 'duration_secs': 1.110787} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.244105] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 920.252121] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 708ed8ab-0ec9-457c-966d-b11c55895981/708ed8ab-0ec9-457c-966d-b11c55895981.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.256040] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e17382e5-cd76-471a-aede-84b4ba81ee17 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.257843] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b80133c-bf1d-4d57-9094-78127f50e1a0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.278896] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 920.278896] env[65503]: value = "task-4450290" [ 920.278896] env[65503]: _type = "Task" [ 920.278896] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.280628] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 920.280628] env[65503]: value = "task-4450291" [ 920.280628] env[65503]: _type = "Task" [ 920.280628] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.294631] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450290, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.301789] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450291, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.414492] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450288, 'name': CreateVM_Task, 'duration_secs': 0.722278} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.416731] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 920.417271] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.417571] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.417865] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 920.418128] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ca2b5c0-5ef2-4f37-8d89-38bce8bfce63 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.423905] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 920.423905] env[65503]: value = 
"session[52ca68d6-9baf-b27a-a70d-300f2615599e]52811d83-857b-a6f9-9610-6bf509ad53d1" [ 920.423905] env[65503]: _type = "Task" [ 920.423905] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.433625] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52811d83-857b-a6f9-9610-6bf509ad53d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.603571] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91995bcc-c10b-48a6-94fa-a4c2243ce03a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.613209] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038c9193-24d7-4b66-a262-e653d660624c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.648269] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462c509a-cc50-4b9c-97c9-74a40f435288 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.658263] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7723e11a-735a-4d58-aa35-1eb7f3bcfb61 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.674447] env[65503]: DEBUG nova.compute.provider_tree [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.686628] env[65503]: DEBUG oslo_vmware.api [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450286, 'name': PowerOnVM_Task, 'duration_secs': 1.581764} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.687705] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 920.687920] env[65503]: INFO nova.compute.manager [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Took 15.68 seconds to spawn the instance on the hypervisor. 
[ 920.688118] env[65503]: DEBUG nova.compute.manager [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 920.688966] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bee6493-2b81-4169-8e3b-536c483c3ae1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.795373] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450290, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.798547] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450291, 'name': ReconfigVM_Task, 'duration_secs': 0.297104} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.798801] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 708ed8ab-0ec9-457c-966d-b11c55895981/708ed8ab-0ec9-457c-966d-b11c55895981.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 920.799080] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance '708ed8ab-0ec9-457c-966d-b11c55895981' progress to 50 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 920.938884] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52811d83-857b-a6f9-9610-6bf509ad53d1, 'name': SearchDatastore_Task, 'duration_secs': 0.020624} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.939227] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.939476] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 920.939904] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.943118] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.943118] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 920.943118] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-959f1667-44c8-4a0c-ae8e-e4262644db77 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.951752] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 920.951752] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 920.952252] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7115641-1db8-4bf0-933a-469e90e544d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.958186] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 920.958186] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cc8b23-0be2-a825-2b0a-6734d19307b0" [ 920.958186] env[65503]: _type = "Task" [ 920.958186] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.966961] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cc8b23-0be2-a825-2b0a-6734d19307b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.178507] env[65503]: DEBUG nova.scheduler.client.report [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 921.217412] env[65503]: INFO nova.compute.manager [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Took 48.98 seconds to build instance. [ 921.293229] env[65503]: DEBUG oslo_vmware.api [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450290, 'name': PowerOnVM_Task, 'duration_secs': 0.944762} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.293571] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 921.293775] env[65503]: INFO nova.compute.manager [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Took 11.21 seconds to spawn the instance on the hypervisor. 
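Interleaved with the task polling, the records above also show the named-lock discipline around the shared image cache: a lock named after the "devstack-image-cache_base/<image id>" path is acquired before the datastore is searched or the cache directory is created, and released afterwards, so concurrent builds do not race on the same cached VMDK. As a minimal sketch of that serialize-check-then-populate pattern only (not Nova's code), the example below uses oslo.concurrency's named locks; the lock name format and the cached_image_exists() helper are hypothetical.

from oslo_concurrency import lockutils

def cached_image_exists(image_id):
    # Hypothetical placeholder for the SearchDatastore_Task check seen in the log.
    return False

def ensure_image_cached(image_id):
    """Serialize check-then-populate of a shared image cache entry with a named lock."""
    # Hypothetical lock name, modeled on the "devstack-image-cache_base/<image id>" locks in the log.
    lock_name = f"devstack-image-cache_base/{image_id}"
    with lockutils.lock(lock_name):
        # Only one caller at a time gets past this point for a given image id.
        if not cached_image_exists(image_id):
            print(f"Populating image cache entry for {image_id}")
        else:
            print(f"Image {image_id} already cached, reusing it")

if __name__ == "__main__":
    ensure_image_cached("d68ffece-ab91-4610-b535-fa1fb25ade93")

The log also shows an external semaphore being acquired for the same cache path; the sketch keeps only the basic in-process lock for brevity.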
[ 921.293950] env[65503]: DEBUG nova.compute.manager [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 921.295243] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6631c999-044f-4344-bdba-ca246de79024 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.308359] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f42af9-a30a-4602-b727-da57a95ab68f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.335853] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a822d1e2-2826-496d-9b81-35df08bff0ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.359629] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance '708ed8ab-0ec9-457c-966d-b11c55895981' progress to 67 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 921.471955] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cc8b23-0be2-a825-2b0a-6734d19307b0, 'name': SearchDatastore_Task, 'duration_secs': 0.015416} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.472742] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2594bf57-7150-41bf-a441-659581aac572 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.479773] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 921.479773] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bbe61f-1d71-6e46-431a-cec5b4fbeb84" [ 921.479773] env[65503]: _type = "Task" [ 921.479773] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.494051] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bbe61f-1d71-6e46-431a-cec5b4fbeb84, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.685771] env[65503]: DEBUG oslo_concurrency.lockutils [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.019s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.688981] env[65503]: DEBUG oslo_concurrency.lockutils [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.381s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.689416] env[65503]: DEBUG nova.objects.instance [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lazy-loading 'resources' on Instance uuid f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.713016] env[65503]: INFO nova.scheduler.client.report [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Deleted allocations for instance bcd845e2-5a89-4eef-bb76-33d69834bbc1 [ 921.720659] env[65503]: DEBUG oslo_concurrency.lockutils [None req-93bde74e-b7b5-4fd0-ac46-9f6d860c04c8 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "909b3535-9410-4820-a34d-6c0e9627f506" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.392s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.820194] env[65503]: INFO nova.compute.manager [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Took 44.34 seconds to build instance. [ 921.868331] env[65503]: WARNING neutronclient.v2_0.client [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 921.993823] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bbe61f-1d71-6e46-431a-cec5b4fbeb84, 'name': SearchDatastore_Task, 'duration_secs': 0.02238} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.994243] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.994535] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d/0ece7ad4-40cd-43e4-8cbc-dddd43f0645d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 921.994837] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6ac252d-c9aa-4551-ac8f-5e6ff74482d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.004221] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 922.004221] env[65503]: value = "task-4450292" [ 922.004221] env[65503]: _type = "Task" [ 922.004221] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.014719] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450292, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.133551] env[65503]: DEBUG nova.network.neutron [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Port 19156670-d2c9-45a9-b9a1-2ab187cf5f4f binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 922.224867] env[65503]: DEBUG oslo_concurrency.lockutils [None req-459adea6-f8c7-4a6d-8cce-9e1f4d93a145 tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bcd845e2-5a89-4eef-bb76-33d69834bbc1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.864s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.324556] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e3828862-c817-4380-bc1c-5abbaf74cb10 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.858s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.516250] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450292, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.543275] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df0f4c3-f848-475e-9f8b-74161f8d34f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.553088] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a254ed7b-53be-4122-a964-c412602cc519 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.593411] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bfb16a-4a47-44c0-9840-ed9635de81e3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.602724] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bda32c-f352-4e1e-8ba1-55691c7a7fcb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.620677] env[65503]: DEBUG nova.compute.provider_tree [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.016571] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450292, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.712391} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.016873] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d/0ece7ad4-40cd-43e4-8cbc-dddd43f0645d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 923.017134] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 923.017414] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-810e3fa3-efd9-4757-9ff1-cb2339025ba7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.026995] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 923.026995] env[65503]: value = "task-4450293" [ 923.026995] env[65503]: _type = "Task" [ 923.026995] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.037485] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450293, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.124862] env[65503]: DEBUG nova.scheduler.client.report [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 923.166699] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "708ed8ab-0ec9-457c-966d-b11c55895981-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.167164] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "708ed8ab-0ec9-457c-966d-b11c55895981-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.167164] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "708ed8ab-0ec9-457c-966d-b11c55895981-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.539321] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450293, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167127} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.539710] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 923.540534] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90d34d7-0b2a-4748-a7ea-63c5904e4990 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.564516] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d/0ece7ad4-40cd-43e4-8cbc-dddd43f0645d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 923.564956] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85667222-6b8b-4a39-97ac-b5a9e417ec3a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.590481] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 923.590481] env[65503]: value = "task-4450294" [ 923.590481] env[65503]: _type = "Task" [ 923.590481] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.598396] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450294, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.634859] env[65503]: DEBUG oslo_concurrency.lockutils [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.946s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.637632] env[65503]: DEBUG oslo_concurrency.lockutils [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.947s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.637842] env[65503]: DEBUG nova.objects.instance [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 923.665224] env[65503]: INFO nova.scheduler.client.report [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Deleted allocations for instance f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee [ 923.900094] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "bdbae548-eefc-4e59-8053-f4b8e232580d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.900094] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bdbae548-eefc-4e59-8053-f4b8e232580d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.900094] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "bdbae548-eefc-4e59-8053-f4b8e232580d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.901388] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bdbae548-eefc-4e59-8053-f4b8e232580d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.901807] env[65503]: DEBUG 
oslo_concurrency.lockutils [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bdbae548-eefc-4e59-8053-f4b8e232580d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.910599] env[65503]: INFO nova.compute.manager [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Terminating instance [ 924.103439] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450294, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.175451] env[65503]: WARNING neutronclient.v2_0.client [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 924.178822] env[65503]: DEBUG oslo_concurrency.lockutils [None req-57ece9b6-fda5-4db5-a4fa-4e51c6fc0681 tempest-ServerGroupTestJSON-1399806409 tempest-ServerGroupTestJSON-1399806409-project-member] Lock "f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.559s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.261707] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.261955] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.262172] env[65503]: DEBUG nova.network.neutron [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 924.312051] env[65503]: DEBUG oslo_vmware.rw_handles [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523213cc-2be2-eb4c-c3c4-f59686993cf0/disk-0.vmdk. 
{{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 924.313640] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5811178f-f952-4825-afd5-2cebf9ba3057 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.321685] env[65503]: DEBUG oslo_vmware.rw_handles [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523213cc-2be2-eb4c-c3c4-f59686993cf0/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 924.321934] env[65503]: ERROR oslo_vmware.rw_handles [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523213cc-2be2-eb4c-c3c4-f59686993cf0/disk-0.vmdk due to incomplete transfer. [ 924.322757] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2ef1c8a7-4db1-4f48-88ce-54f3beb36dd7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.331488] env[65503]: DEBUG oslo_vmware.rw_handles [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523213cc-2be2-eb4c-c3c4-f59686993cf0/disk-0.vmdk. {{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 924.331488] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Uploaded image b21b686c-8154-4b2b-9ac4-03e9bcfc50c2 to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 924.334306] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 924.335051] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-50f8eeeb-8919-42e4-8f3b-79f9997e2ee0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.342726] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 924.342726] env[65503]: value = "task-4450295" [ 924.342726] env[65503]: _type = "Task" [ 924.342726] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.356780] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450295, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.416501] env[65503]: DEBUG nova.compute.manager [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 924.416725] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 924.417654] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a0f9fa-9822-46e7-a11a-878bbe9a833e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.428386] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.429100] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f730cc0-0b3f-4def-9b35-fdbc8b5e8173 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.437584] env[65503]: DEBUG oslo_vmware.api [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 924.437584] env[65503]: value = "task-4450296" [ 924.437584] env[65503]: _type = "Task" [ 924.437584] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.454076] env[65503]: DEBUG oslo_vmware.api [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450296, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.609340] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450294, 'name': ReconfigVM_Task, 'duration_secs': 0.548992} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.610657] env[65503]: DEBUG nova.compute.manager [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received event network-changed-a52973b6-14a8-480e-8ad4-92719252801c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 924.610938] env[65503]: DEBUG nova.compute.manager [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Refreshing instance network info cache due to event network-changed-a52973b6-14a8-480e-8ad4-92719252801c. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 924.611196] env[65503]: DEBUG oslo_concurrency.lockutils [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] Acquiring lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.611348] env[65503]: DEBUG oslo_concurrency.lockutils [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] Acquired lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.611510] env[65503]: DEBUG nova.network.neutron [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Refreshing network info cache for port a52973b6-14a8-480e-8ad4-92719252801c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 924.613160] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d/0ece7ad4-40cd-43e4-8cbc-dddd43f0645d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 924.614510] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f972b6dd-25f2-455c-8288-f3988515ad04 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.624500] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 924.624500] env[65503]: value = "task-4450297" [ 924.624500] env[65503]: _type = "Task" [ 924.624500] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.635961] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450297, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.655152] env[65503]: DEBUG oslo_concurrency.lockutils [None req-29e826ca-8d47-423b-8efa-4c5064ffff00 tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.658684] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.162s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.658684] env[65503]: DEBUG nova.objects.instance [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lazy-loading 'resources' on Instance uuid 1e92795e-cf30-4175-9e31-c29278f3e9e0 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.716493] env[65503]: DEBUG nova.compute.manager [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Received event network-changed-c1eaff02-bbf6-47d2-9655-443ca8021c5a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 924.716698] env[65503]: DEBUG nova.compute.manager [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Refreshing instance network info cache due to event network-changed-c1eaff02-bbf6-47d2-9655-443ca8021c5a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 924.716989] env[65503]: DEBUG oslo_concurrency.lockutils [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] Acquiring lock "refresh_cache-909b3535-9410-4820-a34d-6c0e9627f506" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.717068] env[65503]: DEBUG oslo_concurrency.lockutils [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] Acquired lock "refresh_cache-909b3535-9410-4820-a34d-6c0e9627f506" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.717205] env[65503]: DEBUG nova.network.neutron [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Refreshing network info cache for port c1eaff02-bbf6-47d2-9655-443ca8021c5a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 924.764903] env[65503]: WARNING neutronclient.v2_0.client [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 924.766009] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 924.766636] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 924.856315] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450295, 'name': Destroy_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.880095] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 924.880887] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 924.946153] env[65503]: WARNING neutronclient.v2_0.client [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 924.946153] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 924.946153] env[65503]: WARNING openstack [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 924.961139] env[65503]: DEBUG oslo_vmware.api [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450296, 'name': PowerOffVM_Task, 'duration_secs': 0.247776} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.961139] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.961139] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 924.961540] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5a32ce6-69aa-4082-858a-2420e94923ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.053759] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 925.053759] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 925.053759] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Deleting the datastore file [datastore1] bdbae548-eefc-4e59-8053-f4b8e232580d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 925.053759] env[65503]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a0f066d-c120-4de5-91f3-9dc291367a35 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.060800] env[65503]: DEBUG oslo_vmware.api [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for the task: (returnval){ [ 925.060800] env[65503]: value = "task-4450299" [ 925.060800] env[65503]: _type = "Task" [ 925.060800] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.070581] env[65503]: DEBUG oslo_vmware.api [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450299, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.119957] env[65503]: WARNING neutronclient.v2_0.client [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 925.120826] env[65503]: WARNING openstack [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 925.121221] env[65503]: WARNING openstack [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 925.139500] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450297, 'name': Rename_Task, 'duration_secs': 0.362627} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.139819] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 925.140165] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a0e3319-b7fc-47b8-ad9a-d87109f3fb44 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.149443] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 925.149443] env[65503]: value = "task-4450300" [ 925.149443] env[65503]: _type = "Task" [ 925.149443] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.163834] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450300, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.220832] env[65503]: WARNING neutronclient.v2_0.client [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 925.221641] env[65503]: WARNING openstack [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 925.222150] env[65503]: WARNING openstack [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 925.247823] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.248021] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.307096] env[65503]: DEBUG nova.network.neutron [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance_info_cache with network_info: [{"id": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "address": "fa:16:3e:9b:39:b6", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19156670-d2", "ovs_interfaceid": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 925.357015] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450295, 'name': 
Destroy_Task, 'duration_secs': 0.854155} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.357332] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Destroyed the VM [ 925.358089] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 925.358089] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ba3bd3cf-c32b-4d48-af13-c42c76e930a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.369287] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 925.369287] env[65503]: value = "task-4450301" [ 925.369287] env[65503]: _type = "Task" [ 925.369287] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.379793] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450301, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.555142] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c32014-9ebd-46cd-8b79-721d377757bd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.568719] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955b16d6-cdab-4700-967c-e9021b721a1e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.577197] env[65503]: DEBUG oslo_vmware.api [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Task: {'id': task-4450299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276671} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.577197] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 925.577197] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 925.577449] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 925.577449] env[65503]: INFO nova.compute.manager [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 925.577668] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 925.579530] env[65503]: DEBUG nova.compute.manager [-] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 925.579530] env[65503]: DEBUG nova.network.neutron [-] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 925.579530] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 925.579530] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 925.579530] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 925.618261] env[65503]: WARNING openstack [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 925.618261] env[65503]: WARNING openstack [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 925.630096] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d28307-3c4a-4301-8919-ae5f6989cda0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.637443] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05176149-ab5f-4567-9edd-32e9edebc08c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.653494] env[65503]: DEBUG nova.compute.provider_tree [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.666750] env[65503]: DEBUG oslo_vmware.api [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450300, 'name': PowerOnVM_Task, 'duration_secs': 0.513451} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.666750] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 925.666750] env[65503]: DEBUG nova.compute.manager [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 925.666750] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2be7ac-08ac-4d44-aebc-5da751bf8855 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.671046] env[65503]: WARNING openstack [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 925.671046] env[65503]: WARNING openstack [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 925.736008] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 925.755832] env[65503]: DEBUG nova.compute.manager [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 925.810651] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.881869] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450301, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.966326] env[65503]: WARNING neutronclient.v2_0.client [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 925.966984] env[65503]: WARNING openstack [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 925.967338] env[65503]: WARNING openstack [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 925.996555] env[65503]: WARNING neutronclient.v2_0.client [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 925.997746] env[65503]: WARNING openstack [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 925.998024] env[65503]: WARNING openstack [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 926.160638] env[65503]: DEBUG nova.scheduler.client.report [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 926.180341] env[65503]: DEBUG nova.network.neutron [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updated VIF entry in instance network info cache for port a52973b6-14a8-480e-8ad4-92719252801c. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 926.180867] env[65503]: DEBUG nova.network.neutron [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updating instance_info_cache with network_info: [{"id": "a52973b6-14a8-480e-8ad4-92719252801c", "address": "fa:16:3e:73:61:b2", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa52973b6-14", "ovs_interfaceid": "a52973b6-14a8-480e-8ad4-92719252801c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 926.203535] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.274183] env[65503]: DEBUG nova.network.neutron [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Updated VIF entry in instance network info cache for port c1eaff02-bbf6-47d2-9655-443ca8021c5a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 926.274803] env[65503]: DEBUG nova.network.neutron [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Updating instance_info_cache with network_info: [{"id": "c1eaff02-bbf6-47d2-9655-443ca8021c5a", "address": "fa:16:3e:6e:8d:d9", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1eaff02-bb", "ovs_interfaceid": "c1eaff02-bbf6-47d2-9655-443ca8021c5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 926.287670] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.336751] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdeda65-814c-428a-9d49-2bf65695aabb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.361696] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9361db3e-d1a6-4deb-ac74-9e1c697f70e3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.373649] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance '708ed8ab-0ec9-457c-966d-b11c55895981' progress to 83 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 926.388680] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450301, 'name': RemoveSnapshot_Task, 'duration_secs': 1.006206} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.388987] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 926.389482] env[65503]: DEBUG nova.compute.manager [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 926.390118] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86882d6c-0a77-4363-98fa-6b6db1221699 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.514372] env[65503]: DEBUG nova.network.neutron [-] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 926.636775] env[65503]: DEBUG nova.compute.manager [req-048d1984-549c-48cb-a961-5a8e38a93677 req-55ebd5ba-96f0-4420-bb9a-82129820c4fa service nova] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Received event network-vif-deleted-23e068ca-4763-4b07-a124-fdeee41f5399 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 926.668183] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.012s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.670907] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.449s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.671353] env[65503]: DEBUG nova.objects.instance [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lazy-loading 'resources' on Instance uuid 628e67fa-9a28-468f-85ad-990d3f1e5d8c {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.690198] env[65503]: DEBUG oslo_concurrency.lockutils [req-b495b316-1285-4798-91fb-2a93939bce9d req-97275da2-947c-4830-a4a3-a432f45b4e18 service nova] Releasing lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.696117] env[65503]: INFO nova.scheduler.client.report [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleted allocations for instance 1e92795e-cf30-4175-9e31-c29278f3e9e0 [ 926.778494] env[65503]: DEBUG 
oslo_concurrency.lockutils [req-4b353c5e-2d41-4a58-a4a4-86e489384826 req-d2507a0c-67b3-4999-a1d3-8060c487574a service nova] Releasing lock "refresh_cache-909b3535-9410-4820-a34d-6c0e9627f506" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.885532] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.886125] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3dfbd212-8a74-47bd-b27f-c8419c55ab9b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.894788] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 926.894788] env[65503]: value = "task-4450302" [ 926.894788] env[65503]: _type = "Task" [ 926.894788] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.908699] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquiring lock "0ece7ad4-40cd-43e4-8cbc-dddd43f0645d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.908699] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "0ece7ad4-40cd-43e4-8cbc-dddd43f0645d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.908982] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquiring lock "0ece7ad4-40cd-43e4-8cbc-dddd43f0645d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.908982] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "0ece7ad4-40cd-43e4-8cbc-dddd43f0645d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.909169] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "0ece7ad4-40cd-43e4-8cbc-dddd43f0645d-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.910801] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450302, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.911166] env[65503]: INFO nova.compute.manager [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Shelve offloading [ 926.913126] env[65503]: INFO nova.compute.manager [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Terminating instance [ 927.020088] env[65503]: INFO nova.compute.manager [-] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Took 1.44 seconds to deallocate network for instance. [ 927.176356] env[65503]: DEBUG nova.objects.instance [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lazy-loading 'numa_topology' on Instance uuid 628e67fa-9a28-468f-85ad-990d3f1e5d8c {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.205153] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9dba63b6-b57b-4cf4-b27c-2eabdb34b07a tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "1e92795e-cf30-4175-9e31-c29278f3e9e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.678s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.411018] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450302, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.416776] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.417423] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquiring lock "refresh_cache-0ece7ad4-40cd-43e4-8cbc-dddd43f0645d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.417592] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquired lock "refresh_cache-0ece7ad4-40cd-43e4-8cbc-dddd43f0645d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.417756] env[65503]: DEBUG nova.network.neutron [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 927.418935] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d90116e-ec93-435d-b27b-0dd764177870 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.428208] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 927.428208] env[65503]: value = "task-4450303" [ 927.428208] env[65503]: _type = "Task" [ 927.428208] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.441028] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 927.441472] env[65503]: DEBUG nova.compute.manager [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 927.442768] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc96dc1-280a-4de2-abab-4c7fdfabcb15 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.450711] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.450875] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.451218] env[65503]: DEBUG nova.network.neutron [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 927.527510] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.679301] env[65503]: DEBUG nova.objects.base [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Object Instance<628e67fa-9a28-468f-85ad-990d3f1e5d8c> lazy-loaded attributes: resources,numa_topology {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 927.912539] env[65503]: DEBUG oslo_vmware.api [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450302, 'name': PowerOnVM_Task, 'duration_secs': 0.636639} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.915560] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 927.915746] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3211ef-6e2a-477f-8fdf-48b19785a5f6 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance '708ed8ab-0ec9-457c-966d-b11c55895981' progress to 100 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 927.923993] env[65503]: WARNING neutronclient.v2_0.client [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 927.923993] env[65503]: WARNING openstack [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 927.923993] env[65503]: WARNING openstack [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 927.950123] env[65503]: DEBUG nova.network.neutron [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 927.964174] env[65503]: WARNING neutronclient.v2_0.client [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
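
The PowerOnVM_Task entries above (Invoking VirtualMachine.PowerOnVM_Task, "Waiting for the task ... to complete", _poll_task reporting completed successfully) follow the usual oslo.vmware pattern: invoke a vSphere *_Task method through the API session, then poll the task until it finishes. A rough sketch of that pattern is below; the vCenter host, credentials and the way vm_ref is obtained are placeholder assumptions, not values from this deployment.

    # Hedged sketch of the invoke-then-wait pattern visible in the log.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org',            # assumed host
        'administrator@vsphere.local',    # assumed username
        'secret',                         # assumed password
        api_retry_count=10,
        task_poll_interval=0.5)

    def power_on(vm_ref):
        # Start the asynchronous vSphere task ...
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # ... then block until it completes, as wait_for_task/_poll_task do here.
        return session.wait_for_task(task)
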
[ 927.964906] env[65503]: WARNING openstack [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 927.965270] env[65503]: WARNING openstack [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 927.972797] env[65503]: DEBUG oslo_concurrency.lockutils [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "ad85eef0-cef7-4900-b193-1737a6c2f17b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.973115] env[65503]: DEBUG oslo_concurrency.lockutils [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "ad85eef0-cef7-4900-b193-1737a6c2f17b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.973287] env[65503]: DEBUG oslo_concurrency.lockutils [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "ad85eef0-cef7-4900-b193-1737a6c2f17b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.973689] env[65503]: DEBUG oslo_concurrency.lockutils [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "ad85eef0-cef7-4900-b193-1737a6c2f17b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.973689] env[65503]: DEBUG oslo_concurrency.lockutils [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "ad85eef0-cef7-4900-b193-1737a6c2f17b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.976418] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe53ef49-86f8-4b29-80b0-b7922d83dee5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.979611] env[65503]: INFO nova.compute.manager [None 
req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Terminating instance [ 927.993767] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a6f783-44f7-4187-9520-ae45240d68cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.031168] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d4d944-6cca-4aeb-8308-1d7ad36d8028 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.036522] env[65503]: DEBUG nova.network.neutron [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 928.044402] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffcdf25-eefa-4b6e-996b-7d1b7839603c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.062357] env[65503]: DEBUG nova.compute.provider_tree [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.096991] env[65503]: WARNING openstack [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 928.097476] env[65503]: WARNING openstack [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 928.162687] env[65503]: WARNING neutronclient.v2_0.client [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
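
The "Acquiring lock" / "acquired ... waited Ns" / "released ... held Ns" triples throughout this section come from oslo.concurrency's lockutils wrappers around critical sections such as "compute_resources" and the per-instance do_terminate_instance locks. A minimal sketch of that usage follows; the function bodies are placeholders and only the lock names are taken from the log.

    # Hedged sketch of the oslo.concurrency locking that produces the
    # "Acquiring lock ... acquired ... released" lines above.
    from oslo_concurrency import lockutils

    # Context-manager form, e.g. around resource-tracker bookkeeping:
    def update_usage(tracker, instance):
        with lockutils.lock('compute_resources'):
            tracker.update(instance)      # placeholder for the real accounting

    # Decorator form, serialising operations on one instance by lock name:
    @lockutils.synchronized('0ece7ad4-40cd-43e4-8cbc-dddd43f0645d')
    def do_terminate_instance():
        pass  # placeholder; the real code powers off and unregisters the VM
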
[ 928.163538] env[65503]: WARNING openstack [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 928.163928] env[65503]: WARNING openstack [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 928.278388] env[65503]: DEBUG nova.network.neutron [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updating instance_info_cache with network_info: [{"id": "73e98445-c951-4dc2-82e3-537e2196f82a", "address": "fa:16:3e:d2:11:71", "network": {"id": "1ed26744-3e1a-4978-ae1a-76469b646e4a", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472231669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5a588e741704449878e7a03d7892d11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73e98445-c9", "ovs_interfaceid": "73e98445-c951-4dc2-82e3-537e2196f82a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 928.487668] env[65503]: DEBUG nova.compute.manager [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 928.487934] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 928.489328] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c8ac89-4c65-4141-9658-6766f62588fa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.499880] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 928.500072] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4534a15-07b2-4447-885e-f601ff8ce329 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.509041] env[65503]: DEBUG oslo_vmware.api [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 928.509041] env[65503]: value = "task-4450304" [ 928.509041] env[65503]: _type = "Task" [ 928.509041] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.517877] env[65503]: DEBUG oslo_vmware.api [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450304, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.539850] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Releasing lock "refresh_cache-0ece7ad4-40cd-43e4-8cbc-dddd43f0645d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.540263] env[65503]: DEBUG nova.compute.manager [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 928.540468] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 928.541576] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d5662d-913e-4121-9efe-e541057a1434 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.550713] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 928.550962] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1f6c6da-5e5b-473e-9283-773091a257c0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.559027] env[65503]: DEBUG oslo_vmware.api [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 928.559027] env[65503]: value = "task-4450305" [ 928.559027] env[65503]: _type = "Task" [ 928.559027] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.566189] env[65503]: DEBUG nova.scheduler.client.report [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 928.574537] env[65503]: DEBUG oslo_vmware.api [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450305, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.782916] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.783595] env[65503]: WARNING neutronclient.v2_0.client [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 928.784088] env[65503]: WARNING openstack [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 928.784592] env[65503]: WARNING openstack [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 928.790587] env[65503]: WARNING neutronclient.v2_0.client [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 929.021350] env[65503]: DEBUG oslo_vmware.api [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450304, 'name': PowerOffVM_Task, 'duration_secs': 0.322608} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.021605] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 929.021744] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 929.022019] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3432a48-272f-4d45-9f09-2cb86ead00da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.071137] env[65503]: DEBUG oslo_vmware.api [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450305, 'name': PowerOffVM_Task, 'duration_secs': 0.188159} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.071429] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 929.071588] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 929.071882] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a69136b4-51d0-4452-9a69-7cdababc4f49 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.074577] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 929.075415] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f21c73-ef5a-488e-881f-15d686cf12dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.079031] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.408s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.081520] env[65503]: DEBUG 
oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.395s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.085273] env[65503]: INFO nova.compute.claims [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.095072] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 929.096047] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab0286c4-331b-4c03-a9e7-832e8180fd93 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.102040] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 929.102040] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 929.102040] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleting the datastore file [datastore2] ad85eef0-cef7-4900-b193-1737a6c2f17b {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 929.102371] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e23a2619-3b58-452b-9475-7a5d958d6659 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.108480] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 929.108947] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 929.109269] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Deleting the datastore file [datastore1] 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 929.110923] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07d98496-2af8-47c5-804a-49c4404d0897 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.114636] env[65503]: DEBUG oslo_vmware.api [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for the task: (returnval){ [ 929.114636] env[65503]: value = "task-4450309" [ 929.114636] env[65503]: _type = "Task" [ 929.114636] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.121600] env[65503]: DEBUG oslo_vmware.api [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for the task: (returnval){ [ 929.121600] env[65503]: value = "task-4450310" [ 929.121600] env[65503]: _type = "Task" [ 929.121600] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.126036] env[65503]: DEBUG oslo_vmware.api [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450309, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.130708] env[65503]: DEBUG nova.compute.manager [req-b661eef6-c90f-4615-a83f-0a1c90e1d75a req-f9120333-000c-473e-84f5-50632aeb41c6 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Received event network-vif-unplugged-73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 929.131073] env[65503]: DEBUG oslo_concurrency.lockutils [req-b661eef6-c90f-4615-a83f-0a1c90e1d75a req-f9120333-000c-473e-84f5-50632aeb41c6 service nova] Acquiring lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.131957] env[65503]: DEBUG oslo_concurrency.lockutils [req-b661eef6-c90f-4615-a83f-0a1c90e1d75a req-f9120333-000c-473e-84f5-50632aeb41c6 service nova] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.132230] env[65503]: DEBUG oslo_concurrency.lockutils [req-b661eef6-c90f-4615-a83f-0a1c90e1d75a req-f9120333-000c-473e-84f5-50632aeb41c6 service nova] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.132622] env[65503]: DEBUG nova.compute.manager [req-b661eef6-c90f-4615-a83f-0a1c90e1d75a 
req-f9120333-000c-473e-84f5-50632aeb41c6 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] No waiting events found dispatching network-vif-unplugged-73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 929.132622] env[65503]: WARNING nova.compute.manager [req-b661eef6-c90f-4615-a83f-0a1c90e1d75a req-f9120333-000c-473e-84f5-50632aeb41c6 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Received unexpected event network-vif-unplugged-73e98445-c951-4dc2-82e3-537e2196f82a for instance with vm_state shelved and task_state shelving_offloading. [ 929.141382] env[65503]: DEBUG oslo_vmware.api [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450310, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.182711] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 929.183137] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 929.183362] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Deleting the datastore file [datastore1] ff256d3f-af88-4f01-bdfd-cf89e06ab364 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 929.183738] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d457e966-76d2-4d7c-af0f-7591e9ddc878 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.191330] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 929.191330] env[65503]: value = "task-4450311" [ 929.191330] env[65503]: _type = "Task" [ 929.191330] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.202897] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450311, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.299948] env[65503]: DEBUG nova.compute.manager [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 929.301024] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6be3928-fd54-45c3-af58-137063920498 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.596086] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95921418-06d3-415a-976e-f0db161c5299 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 52.565s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.597784] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 26.783s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.598081] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.598338] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.598548] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.600636] env[65503]: INFO nova.compute.manager [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Terminating instance [ 929.626769] env[65503]: DEBUG oslo_vmware.api [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Task: {'id': task-4450309, 'name': DeleteDatastoreFile_Task, 'duration_secs': 
0.33019} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.630052] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 929.630332] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 929.630517] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 929.630687] env[65503]: INFO nova.compute.manager [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 929.630985] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 929.631503] env[65503]: DEBUG nova.compute.manager [-] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 929.631603] env[65503]: DEBUG nova.network.neutron [-] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 929.631840] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 929.632683] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 929.632952] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 929.646155] env[65503]: DEBUG oslo_vmware.api [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Task: {'id': task-4450310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139172} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.646430] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 929.646608] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 929.646778] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 929.646945] env[65503]: INFO nova.compute.manager [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 929.647209] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 929.647417] env[65503]: DEBUG nova.compute.manager [-] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 929.647517] env[65503]: DEBUG nova.network.neutron [-] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 929.647762] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 929.648330] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 929.648584] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 929.673882] env[65503]: DEBUG nova.network.neutron [-] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 929.674318] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 929.681154] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 929.701631] env[65503]: DEBUG oslo_vmware.api [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450311, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243279} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.701773] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 929.701854] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 929.702131] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 929.730959] env[65503]: INFO nova.scheduler.client.report [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Deleted allocations for instance ff256d3f-af88-4f01-bdfd-cf89e06ab364 [ 929.813533] env[65503]: INFO nova.compute.manager [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] instance snapshotting [ 929.813904] env[65503]: DEBUG nova.objects.instance [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'flavor' on Instance uuid 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.958017] env[65503]: DEBUG nova.compute.manager [req-6dabd9c9-9b29-4060-8caa-3ddb5fc8a034 req-db6460f6-016d-4e5a-8a26-e7e87e32bd56 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Received event network-vif-deleted-6164c166-4054-4e4a-93fb-6e84abe74f7d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 929.958255] env[65503]: INFO nova.compute.manager [req-6dabd9c9-9b29-4060-8caa-3ddb5fc8a034 req-db6460f6-016d-4e5a-8a26-e7e87e32bd56 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Neutron deleted interface 6164c166-4054-4e4a-93fb-6e84abe74f7d; detaching it from the instance and deleting it from the info cache [ 
929.958473] env[65503]: DEBUG nova.network.neutron [req-6dabd9c9-9b29-4060-8caa-3ddb5fc8a034 req-db6460f6-016d-4e5a-8a26-e7e87e32bd56 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 930.045499] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "708ed8ab-0ec9-457c-966d-b11c55895981" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.045837] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "708ed8ab-0ec9-457c-966d-b11c55895981" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.046084] env[65503]: DEBUG nova.compute.manager [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Going to confirm migration 3 {{(pid=65503) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 930.104263] env[65503]: DEBUG nova.compute.manager [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 930.104579] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.104892] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4adb756-2b41-44e9-acc9-cdd3e0461556 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.115612] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3764cf7b-e580-4a3e-ba6a-9f679988cb5d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.151402] env[65503]: WARNING nova.virt.vmwareapi.vmops [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 628e67fa-9a28-468f-85ad-990d3f1e5d8c could not be found. 
[ 930.151670] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 930.151855] env[65503]: INFO nova.compute.manager [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 930.152119] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 930.155491] env[65503]: DEBUG nova.compute.manager [-] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 930.155491] env[65503]: DEBUG nova.network.neutron [-] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 930.156065] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 930.156754] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 930.156754] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 930.177396] env[65503]: DEBUG nova.network.neutron [-] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 930.198136] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 930.236598] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.321095] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb77af5b-597a-4700-b8a0-64d57cd0ef99 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.347798] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe44db4-696e-4c05-869e-1dbabaa1c5f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.410978] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83162f4e-2e5a-460c-818e-82b78a4f698b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.424833] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721210b8-bbfa-4dae-a8b9-1342ab72f0bd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.430380] env[65503]: DEBUG nova.network.neutron [-] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 930.464192] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96298435-c9ca-4ff1-827c-0a17a4ef62dd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.466769] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7093d0bd-2d12-4db3-9026-78087d02a197 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.477539] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2d8a1e-b29e-4289-a56a-4120247dc664 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.485729] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4115b81a-244b-4ead-b61a-a2a4c5214b00 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.507500] env[65503]: DEBUG nova.compute.provider_tree [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.519168] env[65503]: DEBUG nova.compute.manager [req-6dabd9c9-9b29-4060-8caa-3ddb5fc8a034 req-db6460f6-016d-4e5a-8a26-e7e87e32bd56 service nova] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Detach interface failed, port_id=6164c166-4054-4e4a-93fb-6e84abe74f7d, reason: Instance ad85eef0-cef7-4900-b193-1737a6c2f17b could not be found. 
{{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 930.520272] env[65503]: DEBUG nova.scheduler.client.report [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 930.552480] env[65503]: WARNING neutronclient.v2_0.client [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 930.589309] env[65503]: WARNING neutronclient.v2_0.client [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 930.589665] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.591018] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.591018] env[65503]: DEBUG nova.network.neutron [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 930.591018] env[65503]: DEBUG nova.objects.instance [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lazy-loading 'info_cache' on Instance uuid 708ed8ab-0ec9-457c-966d-b11c55895981 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.679826] env[65503]: INFO nova.compute.manager [-] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Took 1.03 seconds to deallocate network for instance. 
[ 930.860387] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 930.860792] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5af6880d-ed59-4daf-ab35-40241202326b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.870442] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 930.870442] env[65503]: value = "task-4450312" [ 930.870442] env[65503]: _type = "Task" [ 930.870442] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.881014] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450312, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.962863] env[65503]: INFO nova.compute.manager [-] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Took 1.33 seconds to deallocate network for instance. [ 930.971051] env[65503]: DEBUG nova.network.neutron [-] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 931.025015] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.943s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.025605] env[65503]: DEBUG nova.compute.manager [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 931.029514] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.530s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.029761] env[65503]: DEBUG nova.objects.instance [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lazy-loading 'resources' on Instance uuid dcc876ae-075f-48d2-81a4-a1b780d6fdec {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.156757] env[65503]: DEBUG nova.compute.manager [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Received event network-changed-73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 931.157607] env[65503]: DEBUG nova.compute.manager [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Refreshing instance network info cache due to event network-changed-73e98445-c951-4dc2-82e3-537e2196f82a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 931.157607] env[65503]: DEBUG oslo_concurrency.lockutils [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] Acquiring lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.157607] env[65503]: DEBUG oslo_concurrency.lockutils [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] Acquired lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.157607] env[65503]: DEBUG nova.network.neutron [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Refreshing network info cache for port 73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 931.188396] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.380398] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450312, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.469824] env[65503]: DEBUG oslo_concurrency.lockutils [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.473558] env[65503]: INFO nova.compute.manager [-] [instance: 628e67fa-9a28-468f-85ad-990d3f1e5d8c] Took 1.32 seconds to deallocate network for instance. [ 931.532824] env[65503]: DEBUG nova.compute.utils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 931.535051] env[65503]: DEBUG nova.compute.manager [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 931.535373] env[65503]: DEBUG nova.network.neutron [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 931.535800] env[65503]: WARNING neutronclient.v2_0.client [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 931.536258] env[65503]: WARNING neutronclient.v2_0.client [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 931.536881] env[65503]: WARNING openstack [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 931.537325] env[65503]: WARNING openstack [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 931.548597] env[65503]: DEBUG nova.compute.manager [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 931.598058] env[65503]: WARNING neutronclient.v2_0.client [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 931.598799] env[65503]: WARNING openstack [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 931.599494] env[65503]: WARNING openstack [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 931.609273] env[65503]: DEBUG nova.policy [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e6928ec26ef495bb726fbb1c452594e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d35478f823448c59b44c2b974942ba9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 931.661116] env[65503]: WARNING neutronclient.v2_0.client [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] The python binding code in neutronclient is deprecated 
in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 931.661929] env[65503]: WARNING openstack [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 931.662377] env[65503]: WARNING openstack [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 931.739309] env[65503]: WARNING openstack [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 931.739692] env[65503]: WARNING openstack [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 931.796053] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.805072] env[65503]: WARNING openstack [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 931.805476] env[65503]: WARNING openstack [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 931.829834] env[65503]: WARNING neutronclient.v2_0.client [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 931.830549] env[65503]: WARNING openstack [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 931.830999] env[65503]: WARNING openstack [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 931.863868] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7963bc-853d-4c80-8daf-3397bc12ce23 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.877500] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f942b7-3ab8-417d-b035-035dd6d7efb1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.887256] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450312, 'name': CreateSnapshot_Task, 'duration_secs': 0.892676} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.911463] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 931.917292] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2265ab78-1583-475d-b851-0b4be7a49813 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.920286] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a491562b-a810-45ab-8a51-aea7875801ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.923785] env[65503]: DEBUG nova.network.neutron [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Successfully created port: 78d90e7e-c69f-428e-aa59-7da9a9c04d8d {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 931.937499] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b87795-8dab-4af8-b7ed-8df1db9514cf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.957123] env[65503]: DEBUG nova.compute.provider_tree [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.968650] env[65503]: WARNING neutronclient.v2_0.client [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 931.968650] env[65503]: WARNING openstack [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 931.968779] env[65503]: WARNING openstack [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 932.001183] env[65503]: DEBUG nova.network.neutron [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance_info_cache with network_info: [{"id": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "address": "fa:16:3e:9b:39:b6", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19156670-d2", "ovs_interfaceid": "19156670-d2c9-45a9-b9a1-2ab187cf5f4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 932.078364] env[65503]: DEBUG nova.network.neutron [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updated VIF entry in instance network info cache for port 73e98445-c951-4dc2-82e3-537e2196f82a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 932.078753] env[65503]: DEBUG nova.network.neutron [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updating instance_info_cache with network_info: [{"id": "73e98445-c951-4dc2-82e3-537e2196f82a", "address": "fa:16:3e:d2:11:71", "network": {"id": "1ed26744-3e1a-4978-ae1a-76469b646e4a", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1472231669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5a588e741704449878e7a03d7892d11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap73e98445-c9", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 932.445970] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 932.445970] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9c116743-bb39-43d4-9b6a-a5c32cce84f6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.455274] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 932.455274] env[65503]: value = "task-4450313" [ 932.455274] env[65503]: _type = "Task" [ 932.455274] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.462794] env[65503]: DEBUG nova.scheduler.client.report [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 932.469638] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450313, 'name': CloneVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.497739] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d16314a0-8f43-4056-b643-565b72e5c5b6 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "628e67fa-9a28-468f-85ad-990d3f1e5d8c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.900s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.504869] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "refresh_cache-708ed8ab-0ec9-457c-966d-b11c55895981" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.505145] env[65503]: DEBUG nova.objects.instance [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lazy-loading 'migration_context' on Instance uuid 708ed8ab-0ec9-457c-966d-b11c55895981 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.559032] env[65503]: DEBUG nova.compute.manager [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 932.581805] env[65503]: DEBUG oslo_concurrency.lockutils [req-b24d659d-1b41-4ef5-bd5b-959b19d3dd2f req-dffee3c4-ac48-4195-b81e-796d7a0fe442 service nova] Releasing lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.584409] env[65503]: DEBUG nova.virt.hardware [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 932.584783] env[65503]: DEBUG nova.virt.hardware [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 932.584866] env[65503]: DEBUG nova.virt.hardware [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 
tempest-ServerMetadataTestJSON-1379754277-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 932.585040] env[65503]: DEBUG nova.virt.hardware [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 932.585180] env[65503]: DEBUG nova.virt.hardware [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 932.585332] env[65503]: DEBUG nova.virt.hardware [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 932.585546] env[65503]: DEBUG nova.virt.hardware [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 932.585698] env[65503]: DEBUG nova.virt.hardware [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 932.585860] env[65503]: DEBUG nova.virt.hardware [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 932.586041] env[65503]: DEBUG nova.virt.hardware [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 932.586266] env[65503]: DEBUG nova.virt.hardware [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 932.587261] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbd6c87-4ce4-4d71-94a7-af2b390d27c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.595737] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45320993-586c-4882-8cba-86b9f206fe5b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.966627] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 
tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450313, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.971590] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.942s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.975028] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.886s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.975274] env[65503]: DEBUG nova.objects.instance [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lazy-loading 'resources' on Instance uuid bc0c0066-b672-4385-8d68-c14e3635af4e {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.992654] env[65503]: INFO nova.scheduler.client.report [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Deleted allocations for instance dcc876ae-075f-48d2-81a4-a1b780d6fdec [ 933.007896] env[65503]: DEBUG nova.objects.base [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Object Instance<708ed8ab-0ec9-457c-966d-b11c55895981> lazy-loaded attributes: info_cache,migration_context {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 933.009384] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d04c45-5e85-4b0e-8300-d16afe963b94 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.032424] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fd4d07a-701d-48cf-aec2-d17a9685e07f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.040241] env[65503]: DEBUG oslo_vmware.api [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 933.040241] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5244454d-801e-bba0-e35c-852768f55447" [ 933.040241] env[65503]: _type = "Task" [ 933.040241] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.050778] env[65503]: DEBUG oslo_vmware.api [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5244454d-801e-bba0-e35c-852768f55447, 'name': SearchDatastore_Task, 'duration_secs': 0.008336} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.051095] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.205303] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.205411] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.427047] env[65503]: DEBUG nova.compute.manager [req-d38aab1b-a6b4-4096-9680-dd749f715676 req-daacfc01-add9-4a30-a74e-f058561b6be3 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Received event network-vif-plugged-78d90e7e-c69f-428e-aa59-7da9a9c04d8d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 933.427047] env[65503]: DEBUG oslo_concurrency.lockutils [req-d38aab1b-a6b4-4096-9680-dd749f715676 req-daacfc01-add9-4a30-a74e-f058561b6be3 service nova] Acquiring lock "f666b0a3-3679-456b-bc59-38107c299f80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.427047] env[65503]: DEBUG oslo_concurrency.lockutils [req-d38aab1b-a6b4-4096-9680-dd749f715676 req-daacfc01-add9-4a30-a74e-f058561b6be3 service nova] Lock "f666b0a3-3679-456b-bc59-38107c299f80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.427276] env[65503]: DEBUG oslo_concurrency.lockutils [req-d38aab1b-a6b4-4096-9680-dd749f715676 req-daacfc01-add9-4a30-a74e-f058561b6be3 service nova] Lock "f666b0a3-3679-456b-bc59-38107c299f80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.427430] env[65503]: DEBUG 
nova.compute.manager [req-d38aab1b-a6b4-4096-9680-dd749f715676 req-daacfc01-add9-4a30-a74e-f058561b6be3 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] No waiting events found dispatching network-vif-plugged-78d90e7e-c69f-428e-aa59-7da9a9c04d8d {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 933.428100] env[65503]: WARNING nova.compute.manager [req-d38aab1b-a6b4-4096-9680-dd749f715676 req-daacfc01-add9-4a30-a74e-f058561b6be3 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Received unexpected event network-vif-plugged-78d90e7e-c69f-428e-aa59-7da9a9c04d8d for instance with vm_state building and task_state spawning. [ 933.468316] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450313, 'name': CloneVM_Task} progress is 95%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.507490] env[65503]: DEBUG oslo_concurrency.lockutils [None req-605bc17f-270b-4fe0-9836-4ece95fee8ff tempest-ServersListShow296Test-1604019957 tempest-ServersListShow296Test-1604019957-project-member] Lock "dcc876ae-075f-48d2-81a4-a1b780d6fdec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.316s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.545663] env[65503]: DEBUG nova.network.neutron [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Successfully updated port: 78d90e7e-c69f-428e-aa59-7da9a9c04d8d {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 933.707356] env[65503]: DEBUG nova.compute.manager [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 933.765089] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15322778-eb65-4408-9712-46a63c9a3450 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.772881] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d14e385-3f7f-47a5-95a0-e43ef5800813 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.806291] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab01fd9-0458-456e-b0b9-0a6f6b872793 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.814874] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c901d5-7845-46fd-846b-9a9b19a0e51a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.829585] env[65503]: DEBUG nova.compute.provider_tree [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.968931] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450313, 'name': CloneVM_Task, 'duration_secs': 1.350814} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.969564] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Created linked-clone VM from snapshot [ 933.970355] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9012e415-3a72-4730-851f-34ea4041b206 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.978642] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Uploading image b6cceed3-7848-4311-8d40-eea9eb611582 {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 934.001566] env[65503]: DEBUG oslo_vmware.rw_handles [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 934.001566] env[65503]: value = "vm-870408" [ 934.001566] env[65503]: _type = "VirtualMachine" [ 934.001566] env[65503]: }. 
{{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 934.001889] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-17ad442a-e557-4e50-8e1d-0472adfcac01 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.011092] env[65503]: DEBUG oslo_vmware.rw_handles [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lease: (returnval){ [ 934.011092] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5231aaf4-f5a0-f0df-0f66-8ea93372bee2" [ 934.011092] env[65503]: _type = "HttpNfcLease" [ 934.011092] env[65503]: } obtained for exporting VM: (result){ [ 934.011092] env[65503]: value = "vm-870408" [ 934.011092] env[65503]: _type = "VirtualMachine" [ 934.011092] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 934.011399] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the lease: (returnval){ [ 934.011399] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5231aaf4-f5a0-f0df-0f66-8ea93372bee2" [ 934.011399] env[65503]: _type = "HttpNfcLease" [ 934.011399] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 934.019657] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 934.019657] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5231aaf4-f5a0-f0df-0f66-8ea93372bee2" [ 934.019657] env[65503]: _type = "HttpNfcLease" [ 934.019657] env[65503]: } is initializing. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 934.050562] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquiring lock "refresh_cache-f666b0a3-3679-456b-bc59-38107c299f80" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.050562] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquired lock "refresh_cache-f666b0a3-3679-456b-bc59-38107c299f80" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.050562] env[65503]: DEBUG nova.network.neutron [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 934.232412] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.332797] env[65503]: DEBUG nova.scheduler.client.report [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 934.519847] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 934.519847] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5231aaf4-f5a0-f0df-0f66-8ea93372bee2" [ 934.519847] env[65503]: _type = "HttpNfcLease" [ 934.519847] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 934.520159] env[65503]: DEBUG oslo_vmware.rw_handles [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 934.520159] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5231aaf4-f5a0-f0df-0f66-8ea93372bee2" [ 934.520159] env[65503]: _type = "HttpNfcLease" [ 934.520159] env[65503]: }. 
{{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 934.520900] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e77015b-071a-487f-b2bd-b70bf90b9fe4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.528408] env[65503]: DEBUG oslo_vmware.rw_handles [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ecb0ad-b070-b4bc-8d8e-3880c0fe2255/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 934.528578] env[65503]: DEBUG oslo_vmware.rw_handles [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ecb0ad-b070-b4bc-8d8e-3880c0fe2255/disk-0.vmdk for reading. {{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 934.584942] env[65503]: WARNING openstack [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 934.585336] env[65503]: WARNING openstack [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 934.623744] env[65503]: DEBUG nova.network.neutron [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 934.639036] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-756b0665-8efd-485e-a96c-21b0810980ab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.644405] env[65503]: WARNING openstack [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 934.644865] env[65503]: WARNING openstack [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 934.714245] env[65503]: WARNING neutronclient.v2_0.client [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 934.714786] env[65503]: WARNING openstack [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 934.715524] env[65503]: WARNING openstack [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 934.808710] env[65503]: DEBUG nova.network.neutron [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Updating instance_info_cache with network_info: [{"id": "78d90e7e-c69f-428e-aa59-7da9a9c04d8d", "address": "fa:16:3e:66:8f:ac", "network": {"id": "7a9f5061-0821-4b75-aa95-84bbbc04ca34", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-736832854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d35478f823448c59b44c2b974942ba9", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d90e7e-c6", "ovs_interfaceid": "78d90e7e-c69f-428e-aa59-7da9a9c04d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 934.838313] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.864s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.842698] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.110s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.842698] env[65503]: DEBUG nova.objects.instance [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lazy-loading 'resources' on Instance uuid d7dd714a-a738-4d68-bbf0-32daf4a1c49b {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 934.865990] env[65503]: INFO nova.scheduler.client.report [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Deleted allocations for instance bc0c0066-b672-4385-8d68-c14e3635af4e [ 935.311959] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Releasing lock "refresh_cache-f666b0a3-3679-456b-bc59-38107c299f80" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.312498] env[65503]: DEBUG nova.compute.manager [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Instance network_info: |[{"id": "78d90e7e-c69f-428e-aa59-7da9a9c04d8d", "address": "fa:16:3e:66:8f:ac", "network": {"id": "7a9f5061-0821-4b75-aa95-84bbbc04ca34", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-736832854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d35478f823448c59b44c2b974942ba9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d90e7e-c6", "ovs_interfaceid": "78d90e7e-c69f-428e-aa59-7da9a9c04d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 935.313132] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:8f:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a78d5760-0bb1-4476-9578-8ad3c3144439', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78d90e7e-c69f-428e-aa59-7da9a9c04d8d', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 935.321660] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Creating folder: Project (9d35478f823448c59b44c2b974942ba9). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 935.322172] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac1dd648-f568-4946-978d-2e1f646faca1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.337254] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Created folder: Project (9d35478f823448c59b44c2b974942ba9) in parent group-v870190. [ 935.337536] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Creating folder: Instances. Parent ref: group-v870409. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 935.337926] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7e17517-9d98-4f0d-b5eb-70b09cbea8b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.352289] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Created folder: Instances in parent group-v870409. [ 935.352289] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 935.352379] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 935.353257] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34c013b1-784b-490a-9c60-bb32be6de62a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.379213] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db067abc-0384-4a0a-b3f5-8bb1fe477c91 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "bc0c0066-b672-4385-8d68-c14e3635af4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.780s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.383865] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 935.383865] env[65503]: value = "task-4450317" [ 935.383865] env[65503]: _type = "Task" [ 935.383865] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.396310] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450317, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.552184] env[65503]: DEBUG nova.compute.manager [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Received event network-changed-78d90e7e-c69f-428e-aa59-7da9a9c04d8d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 935.552466] env[65503]: DEBUG nova.compute.manager [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Refreshing instance network info cache due to event network-changed-78d90e7e-c69f-428e-aa59-7da9a9c04d8d. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 935.552712] env[65503]: DEBUG oslo_concurrency.lockutils [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] Acquiring lock "refresh_cache-f666b0a3-3679-456b-bc59-38107c299f80" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.552882] env[65503]: DEBUG oslo_concurrency.lockutils [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] Acquired lock "refresh_cache-f666b0a3-3679-456b-bc59-38107c299f80" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.553075] env[65503]: DEBUG nova.network.neutron [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Refreshing network info cache for port 78d90e7e-c69f-428e-aa59-7da9a9c04d8d {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 935.627813] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f901f9-ca26-4d9e-8b5c-97079621b185 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.637111] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9e728a-802f-42f1-a592-b3120999d9c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.676657] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd249348-e8b4-4816-80d9-ae9d7e8586c7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.684898] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a48ad1-9a65-41a9-97da-d4a34059c232 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.701481] env[65503]: DEBUG nova.compute.provider_tree [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.895405] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450317, 'name': CreateVM_Task, 'duration_secs': 0.364183} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.895582] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 935.896190] env[65503]: WARNING neutronclient.v2_0.client [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 935.896917] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.897131] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.897635] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 935.897933] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cad8e43c-58af-4c3f-8eb6-34da7d794f92 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.903881] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for the task: (returnval){ [ 935.903881] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e3d634-263f-2351-8a5f-31b0465c1d0f" [ 935.903881] env[65503]: _type = "Task" [ 935.903881] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.913461] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e3d634-263f-2351-8a5f-31b0465c1d0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.060930] env[65503]: WARNING neutronclient.v2_0.client [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 936.061776] env[65503]: WARNING openstack [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 936.062303] env[65503]: WARNING openstack [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 936.167094] env[65503]: WARNING openstack [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 936.167552] env[65503]: WARNING openstack [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 936.205421] env[65503]: DEBUG nova.scheduler.client.report [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 936.254809] env[65503]: WARNING neutronclient.v2_0.client [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 936.255712] env[65503]: WARNING openstack [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 936.256089] env[65503]: WARNING openstack [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 936.360070] env[65503]: DEBUG nova.network.neutron [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Updated VIF entry in instance network info cache for port 78d90e7e-c69f-428e-aa59-7da9a9c04d8d. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 936.360372] env[65503]: DEBUG nova.network.neutron [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Updating instance_info_cache with network_info: [{"id": "78d90e7e-c69f-428e-aa59-7da9a9c04d8d", "address": "fa:16:3e:66:8f:ac", "network": {"id": "7a9f5061-0821-4b75-aa95-84bbbc04ca34", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-736832854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d35478f823448c59b44c2b974942ba9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d90e7e-c6", "ovs_interfaceid": "78d90e7e-c69f-428e-aa59-7da9a9c04d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 936.416605] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e3d634-263f-2351-8a5f-31b0465c1d0f, 'name': SearchDatastore_Task, 'duration_secs': 0.010494} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.416966] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.417310] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 936.417478] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.417639] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.417824] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 936.418131] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bf0457f-9224-4ef0-9714-e59cc0830366 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.428206] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 936.428416] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 936.429234] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7797d287-eabe-457d-a21f-3e53243ae62c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.435837] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for the task: (returnval){ [ 936.435837] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5297fa88-c531-8d5d-f072-f49b43e46c58" [ 936.435837] env[65503]: _type = "Task" [ 936.435837] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.445226] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5297fa88-c531-8d5d-f072-f49b43e46c58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.710778] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.870s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.713741] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.866s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.713977] env[65503]: DEBUG nova.objects.instance [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lazy-loading 'resources' on Instance uuid 236e651f-6a27-4601-8a76-ca1619e32dc6 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.738020] env[65503]: INFO nova.scheduler.client.report [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleted allocations for instance d7dd714a-a738-4d68-bbf0-32daf4a1c49b [ 936.847497] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "8962b1b8-4875-4a1a-b231-36385755a976" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.847697] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock 
"8962b1b8-4875-4a1a-b231-36385755a976" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.862885] env[65503]: DEBUG oslo_concurrency.lockutils [req-eb4669db-f4e7-46e5-92ae-dcdd72f65814 req-a8f71922-f91b-4529-99c3-f499f970f173 service nova] Releasing lock "refresh_cache-f666b0a3-3679-456b-bc59-38107c299f80" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.946911] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5297fa88-c531-8d5d-f072-f49b43e46c58, 'name': SearchDatastore_Task, 'duration_secs': 0.014895} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.947750] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fa6b840-7b7c-4001-be39-34e9fdddd4df {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.954484] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for the task: (returnval){ [ 936.954484] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c4ef97-4fad-6741-684d-788c88e6bc9e" [ 936.954484] env[65503]: _type = "Task" [ 936.954484] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.964248] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c4ef97-4fad-6741-684d-788c88e6bc9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.247666] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d204f119-33c2-4fce-9b04-3d2d0922912a tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "d7dd714a-a738-4d68-bbf0-32daf4a1c49b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.031s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.350814] env[65503]: DEBUG nova.compute.manager [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 937.465996] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c4ef97-4fad-6741-684d-788c88e6bc9e, 'name': SearchDatastore_Task, 'duration_secs': 0.013367} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.468825] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.469099] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] f666b0a3-3679-456b-bc59-38107c299f80/f666b0a3-3679-456b-bc59-38107c299f80.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 937.469570] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-302bb237-f51c-4832-a68f-d5f9b283e35e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.477184] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for the task: (returnval){ [ 937.477184] env[65503]: value = "task-4450318" [ 937.477184] env[65503]: _type = "Task" [ 937.477184] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.482229] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d5a33b-dd43-4191-8208-1573698e8353 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.492944] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7ae39d-1740-4208-9680-995000d56735 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.496476] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450318, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.525705] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298290e2-0150-47b5-bf4f-e845391e8e43 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.534213] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49158ca1-bc85-45f4-a98c-7960efb8e411 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.548927] env[65503]: DEBUG nova.compute.provider_tree [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.630221] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 937.630527] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 937.878700] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.988397] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450318, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.052657] env[65503]: DEBUG nova.scheduler.client.report [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 938.139925] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 938.140116] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 938.140543] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 938.140543] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 938.140543] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 938.141031] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 938.141031] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 938.141031] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 938.490076] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450318, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551593} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.490076] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] f666b0a3-3679-456b-bc59-38107c299f80/f666b0a3-3679-456b-bc59-38107c299f80.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 938.490076] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 938.490521] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-100e7302-cebf-41d1-ae4e-ea381d349cc2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.497860] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for the task: (returnval){ [ 938.497860] env[65503]: value = "task-4450319" [ 938.497860] env[65503]: _type = "Task" [ 938.497860] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.509028] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450319, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.558240] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.844s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.560986] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.358s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.561142] env[65503]: DEBUG nova.objects.instance [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 938.584042] env[65503]: INFO nova.scheduler.client.report [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Deleted allocations for instance 236e651f-6a27-4601-8a76-ca1619e32dc6 [ 938.644710] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.010647] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450319, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080099} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.012275] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 939.013559] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b82ce37-832e-4578-9c5a-f9e137e3c56d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.052587] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] f666b0a3-3679-456b-bc59-38107c299f80/f666b0a3-3679-456b-bc59-38107c299f80.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 939.053099] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bda9a590-ef13-4796-b829-abd91e436eec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.085030] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for the task: (returnval){ [ 939.085030] env[65503]: value = "task-4450320" [ 939.085030] env[65503]: _type = "Task" [ 939.085030] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.091325] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1c3aabd1-8025-4c7b-96d7-61eee168af8d tempest-MultipleCreateTestJSON-1754149764 tempest-MultipleCreateTestJSON-1754149764-project-member] Lock "236e651f-6a27-4601-8a76-ca1619e32dc6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.781s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.098867] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450320, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.579049] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5b2166cb-ce54-4c2e-b31c-cdc336daf682 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.580244] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.293s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.581962] env[65503]: INFO nova.compute.claims [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.594531] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450320, 'name': ReconfigVM_Task, 'duration_secs': 0.291954} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.596040] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Reconfigured VM instance instance-0000004d to attach disk [datastore1] f666b0a3-3679-456b-bc59-38107c299f80/f666b0a3-3679-456b-bc59-38107c299f80.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 939.597327] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-072fd479-a31a-4bb3-80b1-e105d3b32281 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.605512] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for the task: (returnval){ [ 939.605512] env[65503]: value = "task-4450321" [ 939.605512] env[65503]: _type = "Task" [ 939.605512] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.615136] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450321, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.117769] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450321, 'name': Rename_Task, 'duration_secs': 0.143904} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.118026] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 940.118276] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3980108c-5c95-4904-b177-ffd941e04215 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.127038] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for the task: (returnval){ [ 940.127038] env[65503]: value = "task-4450322" [ 940.127038] env[65503]: _type = "Task" [ 940.127038] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.135703] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450322, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.637934] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450322, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.863466] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302a5c85-37b8-4147-865e-df7d8ff9e347 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.872410] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a415db9-445d-47a5-8af0-44d6d3e05123 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.907258] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746f32da-d004-4f22-865f-8f650a0b1f43 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.916809] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58eee57-b1d6-4205-b6d3-8538cdb82f06 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.932561] env[65503]: DEBUG nova.compute.provider_tree [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.139926] env[65503]: DEBUG oslo_vmware.api [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450322, 'name': PowerOnVM_Task, 'duration_secs': 0.638332} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.140288] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 941.140540] env[65503]: INFO nova.compute.manager [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Took 8.58 seconds to spawn the instance on the hypervisor. 
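Editor's note: every vCenter operation in the spawn traced above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same shape in the log: a "Waiting for the task" record, repeated "progress is N%" polls, then "completed successfully" with a duration. The sketch below only illustrates that polling pattern; the real logic is the oslo_vmware wait_for_task/_poll_task code cited in the log paths, and the session object, get_task_info() helper and 0.5 s interval here are assumptions for the example.

    import time

    # Illustrative sketch of the task-polling pattern seen in the log above.
    # Not the oslo.vmware implementation (oslo_vmware/api.py wait_for_task /
    # _poll_task); `session` and `get_task_info()` are hypothetical stand-ins.
    def wait_for_task(session, task_ref, poll_interval=0.5):
        """Poll a vCenter *_Task managed object until it finishes."""
        while True:
            info = session.get_task_info(task_ref)  # assumed helper
            if info.state == "success":
                # Corresponds to the "completed successfully" records, which
                # also report the measured duration_secs.
                return info.result
            if info.state == "error":
                raise RuntimeError(info.error)
            # While the task is queued/running the log prints "progress is N%."
            print(f"Task {info.key} ({info.name}) progress is {info.progress}%.")
            time.sleep(poll_interval)

In the trace above, this loop runs once per step: CopyVirtualDisk_Task (~0.55 s), ExtendVirtualDisk_Task (~0.08 s), ReconfigVM_Task (~0.29 s), Rename_Task (~0.14 s) and PowerOnVM_Task (~0.64 s), before the compute manager reports the 8.58-second spawn of instance f666b0a3.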
[ 941.140702] env[65503]: DEBUG nova.compute.manager [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 941.141887] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2789e993-8c57-4d75-8689-08e5ec0f64e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.436947] env[65503]: DEBUG nova.scheduler.client.report [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 941.661858] env[65503]: INFO nova.compute.manager [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Took 39.99 seconds to build instance. [ 941.944315] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.364s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.945114] env[65503]: DEBUG nova.compute.manager [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 941.947624] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.420s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.947896] env[65503]: DEBUG nova.objects.instance [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lazy-loading 'resources' on Instance uuid bdbae548-eefc-4e59-8053-f4b8e232580d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.163746] env[65503]: DEBUG oslo_concurrency.lockutils [None req-875b12c8-69e1-4010-838f-227fd97efc81 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Lock "f666b0a3-3679-456b-bc59-38107c299f80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.503s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.451953] env[65503]: DEBUG nova.compute.utils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 942.453720] env[65503]: DEBUG nova.compute.manager [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 942.453720] env[65503]: DEBUG nova.network.neutron [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 942.453996] env[65503]: WARNING neutronclient.v2_0.client [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 942.455493] env[65503]: WARNING neutronclient.v2_0.client [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
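Editor's note: the recurring lockutils records above ('Lock "compute_resources" acquired by ... :: waited N s' and '"released" by ... :: held N s') come from a named-lock wrapper that times both how long a caller waited to acquire the lock and how long it held it. The following is a minimal sketch of that pattern using threading.Lock, assuming a simple in-process registry and print-based output; it is not the oslo_concurrency lockutils code referenced at lockutils.py:405/410/424.

    import threading
    import time
    from collections import defaultdict
    from contextlib import contextmanager

    # Sketch of the named-lock timing pattern visible in the log. The registry,
    # caller string and log format are assumptions; the real implementation is
    # oslo_concurrency.lockutils, not this code.
    _locks = defaultdict(threading.Lock)

    @contextmanager
    def timed_lock(name, caller):
        lock = _locks[name]
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

    # Example use mirroring the records above, e.g. instance_claim holding
    # "compute_resources" for 2.364s after update_usage waited 14.420s:
    #
    #   with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    #       ...  # claim host resources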
[ 942.456132] env[65503]: WARNING openstack [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 942.456505] env[65503]: WARNING openstack [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 942.467379] env[65503]: DEBUG nova.compute.manager [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 942.537755] env[65503]: DEBUG nova.policy [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e97b5208de384c19bbc0e332b67fc4ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c5b0c3771b5434992cd58e1af539bde', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 942.752958] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82562dd9-cca6-43f0-bd14-72614ba0c77d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.760120] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a98c98-2095-43ef-9ce0-9a10288ecbe7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.794720] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a483abfb-c6cf-439e-b157-01a538366bf1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.804702] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1f601e-fcc8-4582-b26d-851a5041ccc7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.821565] env[65503]: DEBUG nova.compute.provider_tree [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.860099] env[65503]: DEBUG nova.network.neutron [None 
req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Successfully created port: b2f08e99-4a10-4a2d-8aff-83b8e5454187 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 943.030854] env[65503]: DEBUG oslo_vmware.rw_handles [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ecb0ad-b070-b4bc-8d8e-3880c0fe2255/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 943.031953] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4448abcb-7c54-48a4-9b81-b8aa1ba7302c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.039565] env[65503]: DEBUG oslo_vmware.rw_handles [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ecb0ad-b070-b4bc-8d8e-3880c0fe2255/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 943.039763] env[65503]: ERROR oslo_vmware.rw_handles [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ecb0ad-b070-b4bc-8d8e-3880c0fe2255/disk-0.vmdk due to incomplete transfer. [ 943.040016] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6325beb9-aaf3-4eed-a4b8-4bffe64d2760 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.049403] env[65503]: DEBUG oslo_vmware.rw_handles [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ecb0ad-b070-b4bc-8d8e-3880c0fe2255/disk-0.vmdk. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 943.049513] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Uploaded image b6cceed3-7848-4311-8d40-eea9eb611582 to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 943.051948] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 943.052307] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4f05f52e-ebcf-49d4-aeaf-11119237486f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.060141] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 943.060141] env[65503]: value = "task-4450323" [ 943.060141] env[65503]: _type = "Task" [ 943.060141] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.068762] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450323, 'name': Destroy_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.151996] env[65503]: DEBUG oslo_concurrency.lockutils [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquiring lock "f666b0a3-3679-456b-bc59-38107c299f80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.151996] env[65503]: DEBUG oslo_concurrency.lockutils [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Lock "f666b0a3-3679-456b-bc59-38107c299f80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.151996] env[65503]: DEBUG oslo_concurrency.lockutils [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquiring lock "f666b0a3-3679-456b-bc59-38107c299f80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.152274] env[65503]: DEBUG oslo_concurrency.lockutils [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Lock "f666b0a3-3679-456b-bc59-38107c299f80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.152274] env[65503]: DEBUG oslo_concurrency.lockutils [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Lock "f666b0a3-3679-456b-bc59-38107c299f80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.154608] env[65503]: INFO nova.compute.manager [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Terminating instance [ 943.325155] env[65503]: DEBUG nova.scheduler.client.report [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 943.477689] env[65503]: DEBUG nova.compute.manager [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a 
tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 943.504181] env[65503]: DEBUG nova.virt.hardware [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 943.504527] env[65503]: DEBUG nova.virt.hardware [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 943.504718] env[65503]: DEBUG nova.virt.hardware [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 943.504903] env[65503]: DEBUG nova.virt.hardware [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 943.505066] env[65503]: DEBUG nova.virt.hardware [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 943.505240] env[65503]: DEBUG nova.virt.hardware [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 943.505449] env[65503]: DEBUG nova.virt.hardware [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 943.505607] env[65503]: DEBUG nova.virt.hardware [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 
943.505790] env[65503]: DEBUG nova.virt.hardware [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 943.505952] env[65503]: DEBUG nova.virt.hardware [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 943.506151] env[65503]: DEBUG nova.virt.hardware [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 943.507390] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95cd419-93c1-439c-8a2b-686e5c4b8919 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.517163] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fb267a-0e17-4027-b3a9-4244bc8f6fc3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.569914] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450323, 'name': Destroy_Task, 'duration_secs': 0.335934} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.570223] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Destroyed the VM [ 943.570458] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 943.570725] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bdfb2733-dd3f-4e52-93db-ab9b32cb20d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.578645] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 943.578645] env[65503]: value = "task-4450324" [ 943.578645] env[65503]: _type = "Task" [ 943.578645] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.588780] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450324, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.659220] env[65503]: DEBUG nova.compute.manager [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 943.659515] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 943.660795] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab609c05-0efa-4109-bde3-1d0684e40d1e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.669957] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.670396] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d03d72ba-81f6-46b2-a5d4-bca06ff87a39 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.678871] env[65503]: DEBUG oslo_vmware.api [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for the task: (returnval){ [ 943.678871] env[65503]: value = "task-4450325" [ 943.678871] env[65503]: _type = "Task" [ 943.678871] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.690125] env[65503]: DEBUG oslo_vmware.api [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450325, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.831955] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.883s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.834295] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.598s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.835077] env[65503]: DEBUG nova.objects.instance [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lazy-loading 'resources' on Instance uuid ff256d3f-af88-4f01-bdfd-cf89e06ab364 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.860259] env[65503]: INFO nova.scheduler.client.report [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Deleted allocations for instance bdbae548-eefc-4e59-8053-f4b8e232580d [ 944.089952] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450324, 'name': RemoveSnapshot_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.189949] env[65503]: DEBUG oslo_vmware.api [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450325, 'name': PowerOffVM_Task, 'duration_secs': 0.316433} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.190251] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.190415] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 944.190675] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89906fd8-898c-41a0-ac40-bb8c3f7b4821 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.255213] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 944.255425] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 944.255649] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Deleting the datastore file [datastore1] f666b0a3-3679-456b-bc59-38107c299f80 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.255933] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57a98057-47c6-42b3-af64-36c4842ecb76 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.263594] env[65503]: DEBUG oslo_vmware.api [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for the task: (returnval){ [ 944.263594] env[65503]: value = "task-4450327" [ 944.263594] env[65503]: _type = "Task" [ 944.263594] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.275814] env[65503]: DEBUG nova.compute.manager [req-20437456-3a9c-4415-89b1-090d4bccfa69 req-a1b6fc46-7fcb-4a5e-9ff0-fc575c496357 service nova] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Received event network-vif-plugged-b2f08e99-4a10-4a2d-8aff-83b8e5454187 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 944.276045] env[65503]: DEBUG oslo_concurrency.lockutils [req-20437456-3a9c-4415-89b1-090d4bccfa69 req-a1b6fc46-7fcb-4a5e-9ff0-fc575c496357 service nova] Acquiring lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.276251] env[65503]: DEBUG oslo_concurrency.lockutils [req-20437456-3a9c-4415-89b1-090d4bccfa69 req-a1b6fc46-7fcb-4a5e-9ff0-fc575c496357 service nova] Lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.276413] env[65503]: DEBUG oslo_concurrency.lockutils [req-20437456-3a9c-4415-89b1-090d4bccfa69 req-a1b6fc46-7fcb-4a5e-9ff0-fc575c496357 service nova] Lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.276570] env[65503]: DEBUG nova.compute.manager [req-20437456-3a9c-4415-89b1-090d4bccfa69 req-a1b6fc46-7fcb-4a5e-9ff0-fc575c496357 service nova] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] No waiting events found dispatching network-vif-plugged-b2f08e99-4a10-4a2d-8aff-83b8e5454187 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 944.276762] env[65503]: WARNING nova.compute.manager [req-20437456-3a9c-4415-89b1-090d4bccfa69 req-a1b6fc46-7fcb-4a5e-9ff0-fc575c496357 service nova] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Received unexpected event network-vif-plugged-b2f08e99-4a10-4a2d-8aff-83b8e5454187 for instance with vm_state building and task_state spawning. [ 944.277499] env[65503]: DEBUG oslo_vmware.api [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450327, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.337305] env[65503]: DEBUG nova.objects.instance [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lazy-loading 'numa_topology' on Instance uuid ff256d3f-af88-4f01-bdfd-cf89e06ab364 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.370592] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1ab013ae-6eee-4d1a-83d6-4ada0b1d407e tempest-ListServerFiltersTestJSON-450683048 tempest-ListServerFiltersTestJSON-450683048-project-member] Lock "bdbae548-eefc-4e59-8053-f4b8e232580d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.471s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.372426] env[65503]: DEBUG nova.network.neutron [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Successfully updated port: b2f08e99-4a10-4a2d-8aff-83b8e5454187 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 944.590397] env[65503]: DEBUG oslo_vmware.api [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450324, 'name': RemoveSnapshot_Task, 'duration_secs': 0.608521} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.590666] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 944.590897] env[65503]: INFO nova.compute.manager [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Took 14.27 seconds to snapshot the instance on the hypervisor. [ 944.774540] env[65503]: DEBUG oslo_vmware.api [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Task: {'id': task-4450327, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162137} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.774815] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 944.775618] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 944.775618] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 944.775618] env[65503]: INFO nova.compute.manager [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Took 1.12 seconds to destroy the instance on the hypervisor. [ 944.775828] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 944.775862] env[65503]: DEBUG nova.compute.manager [-] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 944.775966] env[65503]: DEBUG nova.network.neutron [-] [instance: f666b0a3-3679-456b-bc59-38107c299f80] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 944.776525] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 944.776957] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 944.777235] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 944.834221] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 944.840476] env[65503]: DEBUG nova.objects.base [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 944.875609] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "refresh_cache-3b31611f-84f9-4bf6-8e26-f64db06d15ed" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.878508] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "refresh_cache-3b31611f-84f9-4bf6-8e26-f64db06d15ed" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.878508] env[65503]: DEBUG nova.network.neutron [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 945.129669] env[65503]: DEBUG nova.compute.manager [req-1977575b-87c5-4063-9ca7-f9e93a0aacaf req-79cc75a1-92b9-45f9-acb5-87a93425ef47 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Received event network-vif-deleted-78d90e7e-c69f-428e-aa59-7da9a9c04d8d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 945.129864] env[65503]: INFO nova.compute.manager [req-1977575b-87c5-4063-9ca7-f9e93a0aacaf req-79cc75a1-92b9-45f9-acb5-87a93425ef47 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Neutron deleted interface 78d90e7e-c69f-428e-aa59-7da9a9c04d8d; detaching it from the instance and deleting it from the info cache [ 945.130363] env[65503]: DEBUG nova.network.neutron [req-1977575b-87c5-4063-9ca7-f9e93a0aacaf req-79cc75a1-92b9-45f9-acb5-87a93425ef47 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 945.145978] env[65503]: DEBUG nova.compute.manager [None req-64bbd783-31ee-45d2-a9e4-6fb964cfcf82 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Found 1 images (rotation: 2) {{(pid=65503) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5057}} [ 945.151132] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6c2829-1cf2-40e4-920a-e1415e92a98e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.162516] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3dbd985-c26f-4c02-a992-b2e4d2901828 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.198123] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a398f3b5-463c-4cd2-91b2-f402b5aae89b {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.207287] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44cc05b8-9ba8-41a7-ab63-73226844cd0c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.224757] env[65503]: DEBUG nova.compute.provider_tree [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.381155] env[65503]: WARNING openstack [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 945.382118] env[65503]: WARNING openstack [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 945.423928] env[65503]: DEBUG nova.network.neutron [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 945.443890] env[65503]: WARNING openstack [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 945.444332] env[65503]: WARNING openstack [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 945.506049] env[65503]: WARNING neutronclient.v2_0.client [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 945.506049] env[65503]: WARNING openstack [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 945.506049] env[65503]: WARNING openstack [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 945.563626] env[65503]: DEBUG nova.network.neutron [-] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 945.592095] env[65503]: DEBUG nova.network.neutron [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Updating instance_info_cache with network_info: [{"id": "b2f08e99-4a10-4a2d-8aff-83b8e5454187", "address": "fa:16:3e:a0:45:a5", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2f08e99-4a", "ovs_interfaceid": "b2f08e99-4a10-4a2d-8aff-83b8e5454187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 945.633802] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1c257a54-24d4-44a5-9ad7-09467a687a3d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.647738] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328a7883-ee40-43d8-a010-9c734bd8a232 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.682339] env[65503]: DEBUG nova.compute.manager [req-1977575b-87c5-4063-9ca7-f9e93a0aacaf req-79cc75a1-92b9-45f9-acb5-87a93425ef47 service nova] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Detach interface failed, port_id=78d90e7e-c69f-428e-aa59-7da9a9c04d8d, reason: Instance 
f666b0a3-3679-456b-bc59-38107c299f80 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 945.729196] env[65503]: DEBUG nova.scheduler.client.report [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 946.066940] env[65503]: INFO nova.compute.manager [-] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Took 1.29 seconds to deallocate network for instance. [ 946.096670] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "refresh_cache-3b31611f-84f9-4bf6-8e26-f64db06d15ed" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.096670] env[65503]: DEBUG nova.compute.manager [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Instance network_info: |[{"id": "b2f08e99-4a10-4a2d-8aff-83b8e5454187", "address": "fa:16:3e:a0:45:a5", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2f08e99-4a", "ovs_interfaceid": "b2f08e99-4a10-4a2d-8aff-83b8e5454187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 946.096670] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:45:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2f08e99-4a10-4a2d-8aff-83b8e5454187', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.107437] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating folder: Project (8c5b0c3771b5434992cd58e1af539bde). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 946.107821] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5cd591ba-9403-4224-bbb5-f68f45241b08 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.121134] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Created folder: Project (8c5b0c3771b5434992cd58e1af539bde) in parent group-v870190. [ 946.121394] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating folder: Instances. Parent ref: group-v870412. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 946.121724] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11fe622d-1593-45d1-9fad-bd630e0e8c42 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.134365] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Created folder: Instances in parent group-v870412. [ 946.134742] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 946.134996] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 946.136185] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82171001-2088-4f70-bd3b-c762b23dbbb9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.159134] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.159134] env[65503]: value = "task-4450330" [ 946.159134] env[65503]: _type = "Task" [ 946.159134] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.172651] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450330, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.235199] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.401s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.237981] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.050s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.238309] env[65503]: DEBUG nova.objects.instance [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lazy-loading 'resources' on Instance uuid 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.270741] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.271128] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.308883] env[65503]: DEBUG nova.compute.manager [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Received event network-changed-b2f08e99-4a10-4a2d-8aff-83b8e5454187 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 946.308954] env[65503]: DEBUG nova.compute.manager [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Refreshing instance network info cache due to event network-changed-b2f08e99-4a10-4a2d-8aff-83b8e5454187. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 946.311373] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] Acquiring lock "refresh_cache-3b31611f-84f9-4bf6-8e26-f64db06d15ed" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.311533] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] Acquired lock "refresh_cache-3b31611f-84f9-4bf6-8e26-f64db06d15ed" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.311755] env[65503]: DEBUG nova.network.neutron [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Refreshing network info cache for port b2f08e99-4a10-4a2d-8aff-83b8e5454187 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 946.415097] env[65503]: DEBUG nova.compute.manager [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 946.416692] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87ead2f-85dc-4c76-a395-2e4b61811d5a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.576832] env[65503]: DEBUG oslo_concurrency.lockutils [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.669837] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450330, 'name': CreateVM_Task, 'duration_secs': 0.345777} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.669991] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 946.670538] env[65503]: WARNING neutronclient.v2_0.client [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 946.670909] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.671130] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.671501] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 946.671766] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a3799e0-0934-4e25-a79e-3a527e41115d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.677622] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 946.677622] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52581544-ca7f-295e-d455-4d05aad6f095" [ 946.677622] env[65503]: _type = "Task" [ 946.677622] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.686787] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52581544-ca7f-295e-d455-4d05aad6f095, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.747761] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf0ef919-3236-4de3-a715-043cc2224442 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 37.810s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.748691] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 14.953s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.748871] env[65503]: INFO nova.compute.manager [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Unshelving [ 946.776292] env[65503]: INFO nova.compute.manager [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Detaching volume adb6e1a6-ba84-48df-9bdb-d884f47fcd90 [ 946.815138] env[65503]: WARNING neutronclient.v2_0.client [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 946.816969] env[65503]: WARNING openstack [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 946.816969] env[65503]: WARNING openstack [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 946.826696] env[65503]: INFO nova.virt.block_device [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Attempting to driver detach volume adb6e1a6-ba84-48df-9bdb-d884f47fcd90 from mountpoint /dev/sdb [ 946.827032] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Volume detach. 
Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 946.827239] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870394', 'volume_id': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'name': 'volume-adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e4c1c94b-744f-4bed-8e68-3b3f9de7db44', 'attached_at': '', 'detached_at': '', 'volume_id': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'serial': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 946.828412] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f640a6-d0f1-4ae1-bf1a-f200c0ab7c06 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.857776] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852be451-bbad-48ec-aa28-e5114ea72062 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.868460] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a14f5f5-60b1-4981-ae47-36315cca6420 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.899776] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28eb777e-811f-4b3a-84ed-280c46dde275 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.925083] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The volume has not been displaced from its original location: [datastore2] volume-adb6e1a6-ba84-48df-9bdb-d884f47fcd90/volume-adb6e1a6-ba84-48df-9bdb-d884f47fcd90.vmdk. No consolidation needed. 
{{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 946.930272] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Reconfiguring VM instance instance-0000003c to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 946.934195] env[65503]: WARNING openstack [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 946.934601] env[65503]: WARNING openstack [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 946.941469] env[65503]: INFO nova.compute.manager [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] instance snapshotting [ 946.941924] env[65503]: DEBUG nova.objects.instance [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'flavor' on Instance uuid 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.943169] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44d551c8-e9db-4afb-bd7f-9f9a42286ea1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.963549] env[65503]: DEBUG oslo_vmware.api [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 946.963549] env[65503]: value = "task-4450331" [ 946.963549] env[65503]: _type = "Task" [ 946.963549] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.973957] env[65503]: DEBUG oslo_vmware.api [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450331, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.014790] env[65503]: WARNING neutronclient.v2_0.client [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 947.015075] env[65503]: WARNING openstack [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 947.015526] env[65503]: WARNING openstack [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 947.130361] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bffe0d1-14db-484d-8ee5-5c8ee2da4140 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.141035] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adade943-f28f-4233-b0dd-55dad5590f47 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.180261] env[65503]: DEBUG nova.network.neutron [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Updated VIF entry in instance network info cache for port b2f08e99-4a10-4a2d-8aff-83b8e5454187. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 947.180261] env[65503]: DEBUG nova.network.neutron [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Updating instance_info_cache with network_info: [{"id": "b2f08e99-4a10-4a2d-8aff-83b8e5454187", "address": "fa:16:3e:a0:45:a5", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2f08e99-4a", "ovs_interfaceid": "b2f08e99-4a10-4a2d-8aff-83b8e5454187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 947.182089] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510fdcdd-a9a6-458a-8640-0503a02a75ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
947.196462] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52581544-ca7f-295e-d455-4d05aad6f095, 'name': SearchDatastore_Task, 'duration_secs': 0.011608} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.199248] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.199600] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 947.199864] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.200075] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.200308] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 947.201039] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cfe4be6-c730-4b53-bd40-11a61614f197 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.205226] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7088a10-390e-48ee-b759-0530d38b481c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.224999] env[65503]: DEBUG nova.compute.provider_tree [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.230200] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Created directory with path 
[datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 947.230398] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 947.232155] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0303d621-9a55-44c9-b87f-0e2402c0e6fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.238237] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 947.238237] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e13205-7fe2-a7a2-5df0-2e05bb25c9d9" [ 947.238237] env[65503]: _type = "Task" [ 947.238237] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.250562] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e13205-7fe2-a7a2-5df0-2e05bb25c9d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.461314] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de77b379-d251-4229-8ee2-adab7a7366e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.473289] env[65503]: DEBUG oslo_vmware.api [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450331, 'name': ReconfigVM_Task, 'duration_secs': 0.254117} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.486228] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Reconfigured VM instance instance-0000003c to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 947.490965] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb9cce74-aca3-41b0-a4e9-d722cf0ff026 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.501384] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2afc2670-7f2b-46b3-ac27-c5a83136d0c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.513872] env[65503]: DEBUG oslo_vmware.api [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 947.513872] env[65503]: value = "task-4450332" [ 947.513872] env[65503]: _type = "Task" [ 947.513872] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.522901] env[65503]: DEBUG oslo_vmware.api [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450332, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.687556] env[65503]: DEBUG oslo_concurrency.lockutils [req-ffa4195d-0548-4fc5-9da6-72a5d77203e4 req-1dcf55f7-768e-4cad-89e4-7df21657702c service nova] Releasing lock "refresh_cache-3b31611f-84f9-4bf6-8e26-f64db06d15ed" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.734274] env[65503]: DEBUG nova.scheduler.client.report [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.751063] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e13205-7fe2-a7a2-5df0-2e05bb25c9d9, 'name': SearchDatastore_Task, 'duration_secs': 0.010692} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.751278] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d2dce8e-2362-45d0-8a0f-54aa0c359aa5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.763770] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 947.763770] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529ca9d6-a369-9842-b292-d1b15218165c" [ 947.763770] env[65503]: _type = "Task" [ 947.763770] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.777266] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529ca9d6-a369-9842-b292-d1b15218165c, 'name': SearchDatastore_Task} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.777603] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.777928] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 3b31611f-84f9-4bf6-8e26-f64db06d15ed/3b31611f-84f9-4bf6-8e26-f64db06d15ed.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 947.778615] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6ab11ef-7e74-4be7-b813-12fee61fbd5a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.782205] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.789236] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 947.789236] env[65503]: value = "task-4450333" [ 947.789236] env[65503]: _type = "Task" [ 947.789236] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.798652] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450333, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.017050] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 948.017050] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ff5765fc-e6e4-40ab-a8b9-cb3c12e5c7ee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.032981] env[65503]: DEBUG oslo_vmware.api [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450332, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.034350] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 948.034350] env[65503]: value = "task-4450334" [ 948.034350] env[65503]: _type = "Task" [ 948.034350] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.046125] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450334, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.240195] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.002s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.243176] env[65503]: DEBUG oslo_concurrency.lockutils [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.773s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.243471] env[65503]: DEBUG nova.objects.instance [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lazy-loading 'resources' on Instance uuid ad85eef0-cef7-4900-b193-1737a6c2f17b {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.297753] env[65503]: INFO nova.scheduler.client.report [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Deleted allocations for instance 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d [ 948.308177] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450333, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.528586] env[65503]: DEBUG oslo_vmware.api [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450332, 'name': ReconfigVM_Task, 'duration_secs': 0.807963} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.528909] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870394', 'volume_id': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'name': 'volume-adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e4c1c94b-744f-4bed-8e68-3b3f9de7db44', 'attached_at': '', 'detached_at': '', 'volume_id': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90', 'serial': 'adb6e1a6-ba84-48df-9bdb-d884f47fcd90'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 948.545327] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450334, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.805453] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450333, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602073} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.805570] env[65503]: DEBUG oslo_concurrency.lockutils [None req-acea6088-dac0-4ea5-8026-c20df0875592 tempest-ServersListShow2100Test-2091117046 tempest-ServersListShow2100Test-2091117046-project-member] Lock "0ece7ad4-40cd-43e4-8cbc-dddd43f0645d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.897s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.808859] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 3b31611f-84f9-4bf6-8e26-f64db06d15ed/3b31611f-84f9-4bf6-8e26-f64db06d15ed.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 948.809238] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 948.809781] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35e5191a-b78a-4dc9-a30a-c483e6eb2324 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.818143] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 948.818143] env[65503]: value = "task-4450335" [ 948.818143] env[65503]: _type = "Task" [ 948.818143] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.830563] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450335, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.988087] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275048df-9671-4e2e-8650-37e9472e451e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.997772] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1db9ed-858c-42fb-86e9-8670e56889c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.031969] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b823b37f-dd0b-419c-bfd3-23535fce23a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.044978] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b69ee1-f17f-4f55-b8b0-3f35cfa2e78a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.052514] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450334, 'name': CreateSnapshot_Task, 'duration_secs': 0.624918} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.053182] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 949.054083] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734fe2d2-4ba8-497f-9980-4b73c38038f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.065675] env[65503]: DEBUG nova.compute.provider_tree [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.080681] env[65503]: DEBUG nova.objects.instance [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'flavor' on Instance uuid e4c1c94b-744f-4bed-8e68-3b3f9de7db44 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 949.330136] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450335, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.141549} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.330136] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 949.330600] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3950ad7-86ee-4168-8dff-76cc8ef05fc2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.356306] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 3b31611f-84f9-4bf6-8e26-f64db06d15ed/3b31611f-84f9-4bf6-8e26-f64db06d15ed.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 949.356700] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d33a119-6216-4125-a0f2-9f4e7a2bd038 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.380022] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 949.380022] env[65503]: value = "task-4450336" [ 949.380022] env[65503]: _type = "Task" [ 949.380022] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.396284] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450336, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.447712] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquiring lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.447955] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.569085] env[65503]: DEBUG nova.scheduler.client.report [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.585031] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 949.588368] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ef396fba-33a9-433a-9be6-93045ce6121c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.603864] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 949.603864] env[65503]: value = "task-4450337" [ 949.603864] env[65503]: _type = "Task" [ 949.603864] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.615537] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450337, 'name': CloneVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.890904] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450336, 'name': ReconfigVM_Task, 'duration_secs': 0.325935} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.891213] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 3b31611f-84f9-4bf6-8e26-f64db06d15ed/3b31611f-84f9-4bf6-8e26-f64db06d15ed.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 949.891894] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-906ea8b9-0f80-4b6b-9a67-24a2055be585 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.900571] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 949.900571] env[65503]: value = "task-4450338" [ 949.900571] env[65503]: _type = "Task" [ 949.900571] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.913380] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450338, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.951244] env[65503]: DEBUG nova.compute.manager [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 950.077649] env[65503]: DEBUG oslo_concurrency.lockutils [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.834s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.080755] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 17.029s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.095749] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb92c918-b039-492d-ae75-2b0ceca6e0dc tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.824s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.111486] env[65503]: INFO nova.scheduler.client.report [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Deleted allocations for instance ad85eef0-cef7-4900-b193-1737a6c2f17b [ 950.116832] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450337, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.412430] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450338, 'name': Rename_Task, 'duration_secs': 0.172638} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.412804] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 950.413317] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fc66b74-781b-4d20-ac04-75a02aeb4ee8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.421591] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 950.421591] env[65503]: value = "task-4450339" [ 950.421591] env[65503]: _type = "Task" [ 950.421591] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.430746] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450339, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.485619] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.530500] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.530874] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.531242] env[65503]: DEBUG nova.compute.manager [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 950.532178] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4861bde-5d02-40e3-9aba-7b38a2dcebbc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.539840] env[65503]: DEBUG nova.compute.manager [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 950.540543] env[65503]: DEBUG nova.objects.instance [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'flavor' on Instance uuid e4c1c94b-744f-4bed-8e68-3b3f9de7db44 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.616017] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450337, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.623668] env[65503]: DEBUG oslo_concurrency.lockutils [None req-be082265-647c-4b06-a27c-51f51e57af22 tempest-ServersAdminTestJSON-1439417996 tempest-ServersAdminTestJSON-1439417996-project-member] Lock "ad85eef0-cef7-4900-b193-1737a6c2f17b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.650s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.854694] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d36b8cc-d6f1-45ff-8b12-10d4d1a450f4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.867434] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb20131a-8a4b-4f28-9f72-73ea42c9eeb8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.909894] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6791c6d7-c91e-489e-9b20-ed278c595076 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.919662] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586fe3a4-795e-428a-8d26-f0fe4e0d315a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.943236] env[65503]: DEBUG nova.compute.provider_tree [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.948218] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450339, 'name': PowerOnVM_Task} progress is 90%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.118041] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450337, 'name': CloneVM_Task, 'duration_secs': 1.480834} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.118322] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Created linked-clone VM from snapshot [ 951.119135] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2c040a-4548-4a8a-94d9-5099a78c0681 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.127845] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Uploading image a94454fe-9ef4-4eea-bd03-a3ef0c5cd96e {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 951.149358] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 951.149358] env[65503]: value = "vm-870416" [ 951.149358] env[65503]: _type = "VirtualMachine" [ 951.149358] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 951.149658] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-56b12644-bd77-4d9b-bf3b-f0c00952bca7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.158585] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lease: (returnval){ [ 951.158585] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ffdcf7-2e6d-d53b-a0ad-6b795b48cb16" [ 951.158585] env[65503]: _type = "HttpNfcLease" [ 951.158585] env[65503]: } obtained for exporting VM: (result){ [ 951.158585] env[65503]: value = "vm-870416" [ 951.158585] env[65503]: _type = "VirtualMachine" [ 951.158585] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 951.158585] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the lease: (returnval){ [ 951.158585] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ffdcf7-2e6d-d53b-a0ad-6b795b48cb16" [ 951.158585] env[65503]: _type = "HttpNfcLease" [ 951.158585] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 951.166590] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 951.166590] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ffdcf7-2e6d-d53b-a0ad-6b795b48cb16" [ 951.166590] env[65503]: _type = "HttpNfcLease" [ 951.166590] env[65503]: } is initializing. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 951.439739] env[65503]: DEBUG oslo_vmware.api [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450339, 'name': PowerOnVM_Task, 'duration_secs': 0.562705} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.440075] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 951.442035] env[65503]: INFO nova.compute.manager [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Took 7.96 seconds to spawn the instance on the hypervisor. [ 951.442035] env[65503]: DEBUG nova.compute.manager [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 951.442035] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f3229a-3e6d-4bd6-a600-131205229357 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.451947] env[65503]: DEBUG nova.scheduler.client.report [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.550091] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 951.550412] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-608f227b-8593-4869-9a68-b60edc4b5824 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.558260] env[65503]: DEBUG oslo_vmware.api [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 951.558260] env[65503]: value = "task-4450341" [ 951.558260] env[65503]: _type = "Task" [ 951.558260] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.568786] env[65503]: DEBUG oslo_vmware.api [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450341, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.667344] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 951.667344] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ffdcf7-2e6d-d53b-a0ad-6b795b48cb16" [ 951.667344] env[65503]: _type = "HttpNfcLease" [ 951.667344] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 951.667672] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 951.667672] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ffdcf7-2e6d-d53b-a0ad-6b795b48cb16" [ 951.667672] env[65503]: _type = "HttpNfcLease" [ 951.667672] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 951.668377] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6e1b34-7142-49a9-933b-08a23cd526e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.678057] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520e6944-814f-8bf3-43ab-698104cdad40/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 951.678381] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520e6944-814f-8bf3-43ab-698104cdad40/disk-0.vmdk for reading. {{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 951.781216] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-45f76a68-ff34-43fc-bac8-35aa75933fb4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.971851] env[65503]: INFO nova.compute.manager [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Took 25.71 seconds to build instance. [ 952.070265] env[65503]: DEBUG oslo_vmware.api [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450341, 'name': PowerOffVM_Task, 'duration_secs': 0.33478} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.070638] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.070910] env[65503]: DEBUG nova.compute.manager [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 952.072487] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3329d4f8-db34-4da2-a50d-013ebe434604 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.472833] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.392s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.477597] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.244s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.478995] env[65503]: INFO nova.compute.claims [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.482224] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6434ed1a-a05e-4a56-9c24-6a3f85fc008a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.234s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.587551] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0c6bdccc-a732-410a-a35f-9b7636ce9caa tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.661869] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.662694] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.662907] env[65503]: DEBUG nova.compute.manager [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 952.663945] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3f4eaf-f2df-492b-8021-47547300a59e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.674020] env[65503]: DEBUG nova.compute.manager [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 952.674020] env[65503]: DEBUG nova.objects.instance [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lazy-loading 'flavor' on Instance uuid 3b31611f-84f9-4bf6-8e26-f64db06d15ed {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.058839] env[65503]: INFO nova.scheduler.client.report [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted allocation for migration b9d122d6-a2f3-4147-a585-2287e9d12e22 [ 953.125847] env[65503]: DEBUG nova.objects.instance [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'flavor' on Instance uuid e4c1c94b-744f-4bed-8e68-3b3f9de7db44 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.566407] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2d050115-7802-4632-8989-72323ec90094 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "708ed8ab-0ec9-457c-966d-b11c55895981" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 23.520s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.633870] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.634135] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 
tempest-AttachVolumeTestJSON-1693657745-project-member] Acquired lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.634346] env[65503]: DEBUG nova.network.neutron [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 953.634573] env[65503]: DEBUG nova.objects.instance [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'info_cache' on Instance uuid e4c1c94b-744f-4bed-8e68-3b3f9de7db44 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.687465] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.688144] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6a7dd68-f04d-47ce-9ece-46f1990eaf1a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.696849] env[65503]: DEBUG oslo_vmware.api [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 953.696849] env[65503]: value = "task-4450342" [ 953.696849] env[65503]: _type = "Task" [ 953.696849] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.710987] env[65503]: DEBUG oslo_vmware.api [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450342, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.748021] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74176f2-96b8-4c8d-a92c-27c76b660a2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.756841] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e415bf5-4e8e-4a1b-82f9-7e7500037cdd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.793979] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4a52ea-0888-475d-a6ad-76453532ab92 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.805777] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b9c46d-ed16-4299-a33c-bb3e0b50a271 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.821648] env[65503]: DEBUG nova.compute.provider_tree [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.138554] env[65503]: DEBUG nova.objects.base [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 954.217099] env[65503]: DEBUG oslo_vmware.api [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450342, 'name': PowerOffVM_Task, 'duration_secs': 0.23906} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.217099] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.217099] env[65503]: DEBUG nova.compute.manager [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 954.217099] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb398824-51aa-4af7-929e-7539a10f1b0c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.325874] env[65503]: DEBUG nova.scheduler.client.report [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 954.641906] env[65503]: WARNING neutronclient.v2_0.client [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
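Aside: the recurring "Acquiring lock ... / Lock ... acquired ... :: waited N.NNNs / Lock ... "released" ... :: held N.NNNs" triplets in the entries above are emitted by oslo.concurrency's lockutils wrapper (the inner frames at lockutils.py:405/410/424). The snippet below is only a minimal, illustrative sketch of that locking pattern using the public oslo.concurrency API; it is not Nova's actual resource-tracker or compute-manager code, and the function names are hypothetical stand-ins.

import time

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_usage(instance_uuid):
    # Runs only while the "compute_resources" lock is held, which is why the
    # log shows long ":: waited N.NNNs" values when several resource-tracker
    # calls queue behind one another.
    time.sleep(0.1)  # stand-in for claim/usage bookkeeping

def locked_build(instance_uuid):
    # Context-manager form, closer to the explicit acquire/release pairs that
    # bracket _locked_do_build_and_run_instance in the entries above.
    with lockutils.lock(instance_uuid):
        pass  # build work would happen here

Note that lockutils logs the "waited"/"held" durations itself, so the sketch does not re-implement that bookkeeping.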
[ 954.642652] env[65503]: WARNING openstack [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 954.643022] env[65503]: WARNING openstack [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 954.737606] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b83ac116-6471-4b12-b04d-d6db80768c21 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.075s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.803430] env[65503]: WARNING openstack [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 954.803847] env[65503]: WARNING openstack [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 954.833817] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.356s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.833817] env[65503]: DEBUG nova.compute.manager [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 954.841542] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.962s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.843311] env[65503]: INFO nova.compute.claims [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 954.878401] env[65503]: WARNING neutronclient.v2_0.client [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 954.879162] env[65503]: WARNING openstack [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 954.879520] env[65503]: WARNING openstack [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 954.974600] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "708ed8ab-0ec9-457c-966d-b11c55895981" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.975039] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "708ed8ab-0ec9-457c-966d-b11c55895981" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.975172] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "708ed8ab-0ec9-457c-966d-b11c55895981-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.975355] env[65503]: DEBUG 
oslo_concurrency.lockutils [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "708ed8ab-0ec9-457c-966d-b11c55895981-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.976447] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "708ed8ab-0ec9-457c-966d-b11c55895981-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.979862] env[65503]: INFO nova.compute.manager [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Terminating instance [ 954.984890] env[65503]: DEBUG nova.network.neutron [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Updating instance_info_cache with network_info: [{"id": "12c029b6-d630-419b-8167-53eb6612a069", "address": "fa:16:3e:47:e5:1b", "network": {"id": "4abe123f-4f80-42e1-8dd0-fcf69e26c177", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-424144792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19e0e62fe31a46fc802dbfc625ac7645", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12c029b6-d6", "ovs_interfaceid": "12c029b6-d630-419b-8167-53eb6612a069", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 955.348435] env[65503]: DEBUG nova.compute.utils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 955.352870] env[65503]: DEBUG nova.compute.manager [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 955.352870] env[65503]: DEBUG nova.network.neutron [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 955.353132] env[65503]: WARNING neutronclient.v2_0.client [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 955.353464] env[65503]: WARNING neutronclient.v2_0.client [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 955.354038] env[65503]: WARNING openstack [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 955.354407] env[65503]: WARNING openstack [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 955.423597] env[65503]: DEBUG nova.policy [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55db708d2a9b47baa25cafed2be1ba91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '463e93d05e1e4b27a3dc866a5b1991d0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 955.488588] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Releasing lock "refresh_cache-e4c1c94b-744f-4bed-8e68-3b3f9de7db44" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.493780] env[65503]: DEBUG nova.compute.manager [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 955.494096] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 955.495037] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1de7291-fb8c-475c-8d9d-00823467625b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.507974] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 955.508261] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-906a7484-1185-4808-b0c4-2386382200dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.520678] env[65503]: DEBUG oslo_vmware.api [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 955.520678] env[65503]: value = "task-4450343" [ 955.520678] env[65503]: _type = "Task" [ 955.520678] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.533351] env[65503]: DEBUG oslo_vmware.api [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450343, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.552903] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "82415443-1884-4898-996e-828d23f67f23" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.553059] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "82415443-1884-4898-996e-828d23f67f23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.640275] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ff288c-52b6-48f1-a418-10094eb954fe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.649928] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a50b61-b1e5-49ae-8b8b-f906e42a682b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.684645] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba6631a-8203-4461-9859-fbc3ce808403 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.694106] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa44d2d4-f236-4e54-aa52-8187a251f48d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.710019] env[65503]: DEBUG nova.compute.provider_tree [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.832240] env[65503]: DEBUG nova.network.neutron [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Successfully created port: aa79a652-2e09-4670-8d1d-195369c94b2b {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 955.853673] env[65503]: DEBUG nova.compute.manager [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 956.041336] env[65503]: DEBUG oslo_vmware.api [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450343, 'name': PowerOffVM_Task, 'duration_secs': 0.252371} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.041702] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 956.041864] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 956.042156] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-732a48aa-14c1-4314-b104-7a8b74cf27d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.059048] env[65503]: DEBUG nova.compute.manager [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 956.108932] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 956.109178] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 956.109387] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleting the datastore file [datastore1] 708ed8ab-0ec9-457c-966d-b11c55895981 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.109670] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89a1d614-d2dc-4d96-8b09-ce584f23202e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.118572] env[65503]: DEBUG oslo_vmware.api [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 956.118572] env[65503]: value = "task-4450345" [ 956.118572] env[65503]: _type = "Task" [ 956.118572] 
env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.128283] env[65503]: DEBUG oslo_vmware.api [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450345, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.213666] env[65503]: DEBUG nova.scheduler.client.report [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 956.218270] env[65503]: DEBUG nova.compute.manager [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 956.219012] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617cca52-b986-4337-b0f5-f5ed2895c3d1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.495995] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.495995] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9fb064f-d54e-40e2-bc5a-5f938bda47b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.504355] env[65503]: DEBUG oslo_vmware.api [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 956.504355] env[65503]: value = "task-4450346" [ 956.504355] env[65503]: _type = "Task" [ 956.504355] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.513388] env[65503]: DEBUG oslo_vmware.api [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450346, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.582255] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.629433] env[65503]: DEBUG oslo_vmware.api [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450345, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165465} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.629801] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 956.630051] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 956.630322] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 956.630423] env[65503]: INFO nova.compute.manager [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Took 1.14 seconds to destroy the instance on the hypervisor. [ 956.630658] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 956.630857] env[65503]: DEBUG nova.compute.manager [-] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 956.630966] env[65503]: DEBUG nova.network.neutron [-] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 956.631240] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
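The teardown traced above (PowerOffVM_Task at 955.508261, UnregisterVM at 956.042156, DeleteDatastoreFile_Task at 956.109670) and the PowerOnVM_Task for instance e4c1c94b are all driven through oslo.vmware's session layer: every vSphere method that returns a Task object is handed to wait_for_task, whose polling loop emits the "Waiting for the task ... progress is N%" lines. A minimal sketch of that pattern, assuming a vm_ref already looked up elsewhere (an illustration, not Nova's vmops code):

    # Hedged sketch, not Nova source: the invoke_api/wait_for_task pattern
    # behind the task-polling DEBUG lines above. vm_ref is assumed to be a
    # VirtualMachine managed-object reference obtained from a prior
    # PropertyCollector query.
    from oslo_vmware import api

    def power_off_and_wait(session: api.VMwareAPISession, vm_ref):
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task polls TaskInfo until vCenter reports success and
        # raises if the task ends in an error state.
        return session.wait_for_task(task)

The later DeleteDatastoreFile_Task, PowerOnVM_Task, CreateSnapshot_Task and CloneVM_Task calls in this trace follow the same invoke-then-wait shape; UnregisterVM is a plain synchronous call and needs no wait.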
[ 956.631923] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 956.632268] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 956.719799] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.879s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.720631] env[65503]: DEBUG nova.compute.manager [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 956.723813] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.079s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.724027] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.724235] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 956.724618] env[65503]: DEBUG oslo_concurrency.lockutils [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.148s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.724918] env[65503]: DEBUG nova.objects.instance [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Lazy-loading 'resources' on Instance uuid f666b0a3-3679-456b-bc59-38107c299f80 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.727539] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5af041-c209-4d08-84b7-5a83474b2a28 {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.735240] env[65503]: INFO nova.compute.manager [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] instance snapshotting [ 956.735240] env[65503]: WARNING nova.compute.manager [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 956.739757] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf050b64-7dbf-4400-b8a5-fd0edb8e58ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.747136] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092be4ba-afc5-4724-bb76-9058a729d058 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.768592] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae13848-42c6-4e89-bea2-b2a443cb2c03 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.784238] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3af3168-2bf7-4a57-b518-55ba8a1e4524 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.798273] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f551f89-d989-4a2f-a29a-2a71f336281c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.834109] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177923MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 956.834625] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.865224] env[65503]: DEBUG nova.compute.manager [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 956.898468] env[65503]: DEBUG nova.virt.hardware [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 956.898752] env[65503]: DEBUG nova.virt.hardware [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 956.898927] env[65503]: DEBUG nova.virt.hardware [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 956.899194] env[65503]: DEBUG nova.virt.hardware [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 956.899404] env[65503]: DEBUG nova.virt.hardware [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 956.899704] env[65503]: DEBUG nova.virt.hardware [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 956.899790] env[65503]: DEBUG nova.virt.hardware [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 956.899938] env[65503]: DEBUG nova.virt.hardware [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 956.900122] env[65503]: DEBUG nova.virt.hardware [None 
req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 956.900290] env[65503]: DEBUG nova.virt.hardware [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 956.900470] env[65503]: DEBUG nova.virt.hardware [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 956.901958] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180ca7ab-1860-48bb-8435-b80d9454b1db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.911616] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b446151-62eb-4cc2-8dac-187787ae2f1c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.014757] env[65503]: DEBUG oslo_vmware.api [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450346, 'name': PowerOnVM_Task, 'duration_secs': 0.44357} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.015274] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.015527] env[65503]: DEBUG nova.compute.manager [None req-bf6da78f-57e6-484d-99e0-5a0115cf65d1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 957.016365] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fdb986-e02e-40ab-8430-98cdcaa5dd91 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.230751] env[65503]: DEBUG nova.compute.utils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 957.238665] env[65503]: DEBUG nova.compute.manager [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 957.238955] env[65503]: DEBUG nova.network.neutron [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 957.239547] env[65503]: WARNING neutronclient.v2_0.client [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 957.240305] env[65503]: WARNING neutronclient.v2_0.client [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 957.241027] env[65503]: WARNING openstack [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 957.241333] env[65503]: WARNING openstack [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 957.296969] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 957.297305] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0b8a1f78-adce-4bf3-b070-2da481883ba5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.310020] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 957.310020] env[65503]: value = "task-4450347" [ 957.310020] env[65503]: _type = "Task" [ 957.310020] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.326794] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450347, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.412672] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 957.431393] env[65503]: DEBUG nova.network.neutron [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Successfully updated port: aa79a652-2e09-4670-8d1d-195369c94b2b {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 957.458867] env[65503]: DEBUG nova.policy [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9337f2cb77a24772aba3ef0eb341f2d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd6e7f469b7d408fae0621171c096f0a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 957.534192] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40adb62-e594-4d8b-bcec-19d6dab8f962 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.543999] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7400340a-8b03-424c-9304-5d7539f15def {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.577963] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6b35d5-6d01-4d9c-8d18-d8ce9f36c186 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.587942] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ab5f43-1cb7-42c5-b723-79dfb04af80e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.605364] env[65503]: DEBUG nova.compute.provider_tree [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.754541] env[65503]: DEBUG nova.compute.manager [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 957.826409] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450347, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.843204] env[65503]: DEBUG nova.network.neutron [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Successfully created port: 53fe187f-f12f-40ad-94fa-75bb9f0182ed {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 957.906599] env[65503]: DEBUG nova.compute.manager [req-3440c7e2-9c4b-432b-a971-20f0ceeed20f req-3b41ea4b-2c24-47ad-a598-7e0e596d9263 service nova] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Received event network-vif-plugged-aa79a652-2e09-4670-8d1d-195369c94b2b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 957.907045] env[65503]: DEBUG oslo_concurrency.lockutils [req-3440c7e2-9c4b-432b-a971-20f0ceeed20f req-3b41ea4b-2c24-47ad-a598-7e0e596d9263 service nova] Acquiring lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.907045] env[65503]: DEBUG oslo_concurrency.lockutils [req-3440c7e2-9c4b-432b-a971-20f0ceeed20f req-3b41ea4b-2c24-47ad-a598-7e0e596d9263 service nova] Lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.907375] env[65503]: DEBUG oslo_concurrency.lockutils [req-3440c7e2-9c4b-432b-a971-20f0ceeed20f req-3b41ea4b-2c24-47ad-a598-7e0e596d9263 service nova] Lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.907375] env[65503]: DEBUG nova.compute.manager [req-3440c7e2-9c4b-432b-a971-20f0ceeed20f req-3b41ea4b-2c24-47ad-a598-7e0e596d9263 service nova] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] No waiting events found dispatching network-vif-plugged-aa79a652-2e09-4670-8d1d-195369c94b2b {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 957.907900] env[65503]: WARNING nova.compute.manager [req-3440c7e2-9c4b-432b-a971-20f0ceeed20f req-3b41ea4b-2c24-47ad-a598-7e0e596d9263 service nova] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Received unexpected event network-vif-plugged-aa79a652-2e09-4670-8d1d-195369c94b2b for instance with vm_state building and task_state spawning. 
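The nova.virt.hardware walk logged at 956.898468 through 956.900470 (and repeated below at 958.792470 for the second build) starts from 1 vCPU with no flavor or image topology limits, so the defaults of 65536 sockets, cores and threads apply; it then keeps every sockets x cores x threads combination whose product equals the vCPU count, which for a single vCPU leaves only 1:1:1. A simplified illustration of that enumeration (Nova's real _get_possible_cpu_topologies in nova/virt/hardware.py handles many more constraints):

    # Simplified illustration only; it mirrors the inputs and output shown in
    # the hardware.py DEBUG lines above but is not Nova's implementation.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)]  ->  "Got 1 possible topologies"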
[ 957.938050] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "refresh_cache-81e27e9b-7d7e-4b04-8257-268660fd9ec3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.938371] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "refresh_cache-81e27e9b-7d7e-4b04-8257-268660fd9ec3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.938444] env[65503]: DEBUG nova.network.neutron [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 958.109608] env[65503]: DEBUG nova.scheduler.client.report [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 958.219263] env[65503]: DEBUG nova.network.neutron [-] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 958.325685] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450347, 'name': CreateSnapshot_Task, 'duration_secs': 0.858732} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.325992] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 958.327211] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760d2e44-4426-4fd7-a1c9-dd638c2899a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.444170] env[65503]: WARNING openstack [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 958.444882] env[65503]: WARNING openstack [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 958.617136] env[65503]: DEBUG oslo_concurrency.lockutils [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.892s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.619653] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.837s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.619887] env[65503]: DEBUG nova.objects.instance [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lazy-loading 'pci_requests' on Instance uuid ff256d3f-af88-4f01-bdfd-cf89e06ab364 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.647890] env[65503]: INFO nova.scheduler.client.report [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Deleted allocations for instance f666b0a3-3679-456b-bc59-38107c299f80 [ 958.722063] env[65503]: INFO nova.compute.manager [-] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Took 2.09 seconds to deallocate network for instance. 
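The inventory payload reported for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 (logged verbatim at 956.213666 and again at 958.109608) is what bounds the claims being made in this section: under standard Placement semantics each resource class offers (total - reserved) * allocation_ratio of schedulable capacity, with max_unit capping any single allocation. A short worked example using exactly those figures:

    # Arithmetic illustration using the inventory values from the log above;
    # the capacity formula is standard Placement behaviour, the script is mine.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 96},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity {capacity:.0f}, at most {inv['max_unit']} per allocation")

    # VCPU: capacity 192, at most 16 per allocation
    # MEMORY_MB: capacity 196078, at most 65530 per allocation
    # DISK_GB: capacity 200, at most 96 per allocation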
[ 958.762117] env[65503]: DEBUG nova.compute.manager [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 958.792470] env[65503]: DEBUG nova.virt.hardware [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 958.792845] env[65503]: DEBUG nova.virt.hardware [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 958.793091] env[65503]: DEBUG nova.virt.hardware [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 958.793336] env[65503]: DEBUG nova.virt.hardware [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 958.793477] env[65503]: DEBUG nova.virt.hardware [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 958.793621] env[65503]: DEBUG nova.virt.hardware [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 958.793824] env[65503]: DEBUG nova.virt.hardware [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 958.793973] env[65503]: DEBUG nova.virt.hardware [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 958.794142] env[65503]: DEBUG nova.virt.hardware [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 958.794304] env[65503]: DEBUG nova.virt.hardware [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 958.794549] env[65503]: DEBUG nova.virt.hardware [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 958.795426] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5fe3861-eed0-4ad9-ab16-bb2c1a8177f3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.804945] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9584865c-a086-4901-aa63-6721484d6473 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.849155] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 958.849560] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-265c96af-59bd-43d2-b310-723b88b3e6ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.860063] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 958.860063] env[65503]: value = "task-4450348" [ 958.860063] env[65503]: _type = "Task" [ 958.860063] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.870571] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450348, 'name': CloneVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.026774] env[65503]: DEBUG nova.network.neutron [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 959.124081] env[65503]: DEBUG nova.objects.instance [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lazy-loading 'numa_topology' on Instance uuid ff256d3f-af88-4f01-bdfd-cf89e06ab364 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.156821] env[65503]: DEBUG oslo_concurrency.lockutils [None req-464c8d9a-de21-4aa7-b833-447491396965 tempest-ServerMetadataTestJSON-1379754277 tempest-ServerMetadataTestJSON-1379754277-project-member] Lock "f666b0a3-3679-456b-bc59-38107c299f80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.004s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.231065] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.376466] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450348, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.513394] env[65503]: DEBUG nova.network.neutron [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Successfully updated port: 53fe187f-f12f-40ad-94fa-75bb9f0182ed {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 959.573375] env[65503]: WARNING openstack [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 959.573375] env[65503]: WARNING openstack [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 959.632146] env[65503]: INFO nova.compute.claims [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 959.694384] env[65503]: WARNING neutronclient.v2_0.client [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 959.695732] env[65503]: WARNING openstack [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 959.696428] env[65503]: WARNING openstack [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 959.875803] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450348, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.018303] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "refresh_cache-8962b1b8-4875-4a1a-b231-36385755a976" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.018303] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired lock "refresh_cache-8962b1b8-4875-4a1a-b231-36385755a976" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.018303] env[65503]: DEBUG nova.network.neutron [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 960.176084] env[65503]: DEBUG nova.network.neutron [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Updating instance_info_cache with network_info: [{"id": "aa79a652-2e09-4670-8d1d-195369c94b2b", "address": "fa:16:3e:69:d1:da", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa79a652-2e", "ovs_interfaceid": "aa79a652-2e09-4670-8d1d-195369c94b2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 960.269175] env[65503]: DEBUG nova.compute.manager [req-daa4f178-610e-4473-b813-cfc66e35b13c req-9d6e95e3-8726-460d-8b95-0e062dcc65aa service nova] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Received event network-vif-plugged-53fe187f-f12f-40ad-94fa-75bb9f0182ed {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 960.269387] env[65503]: DEBUG oslo_concurrency.lockutils [req-daa4f178-610e-4473-b813-cfc66e35b13c req-9d6e95e3-8726-460d-8b95-0e062dcc65aa service nova] Acquiring lock "8962b1b8-4875-4a1a-b231-36385755a976-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.270455] env[65503]: DEBUG oslo_concurrency.lockutils [req-daa4f178-610e-4473-b813-cfc66e35b13c req-9d6e95e3-8726-460d-8b95-0e062dcc65aa service nova] Lock "8962b1b8-4875-4a1a-b231-36385755a976-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.270455] env[65503]: DEBUG oslo_concurrency.lockutils [req-daa4f178-610e-4473-b813-cfc66e35b13c req-9d6e95e3-8726-460d-8b95-0e062dcc65aa service nova] Lock "8962b1b8-4875-4a1a-b231-36385755a976-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.270455] env[65503]: DEBUG nova.compute.manager [req-daa4f178-610e-4473-b813-cfc66e35b13c req-9d6e95e3-8726-460d-8b95-0e062dcc65aa service nova] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] No waiting events found dispatching network-vif-plugged-53fe187f-f12f-40ad-94fa-75bb9f0182ed {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 960.270455] env[65503]: WARNING nova.compute.manager [req-daa4f178-610e-4473-b813-cfc66e35b13c req-9d6e95e3-8726-460d-8b95-0e062dcc65aa service nova] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Received unexpected event network-vif-plugged-53fe187f-f12f-40ad-94fa-75bb9f0182ed for instance with vm_state building and task_state spawning. [ 960.359622] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520e6944-814f-8bf3-43ab-698104cdad40/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 960.362049] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c111afba-9b8b-4deb-ba11-233902c4faab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.376840] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450348, 'name': CloneVM_Task} progress is 95%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.377102] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520e6944-814f-8bf3-43ab-698104cdad40/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 960.377243] env[65503]: ERROR oslo_vmware.rw_handles [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520e6944-814f-8bf3-43ab-698104cdad40/disk-0.vmdk due to incomplete transfer. 
[ 960.377488] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-70857687-55b0-45cc-8d80-9d52c775309f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.387178] env[65503]: DEBUG oslo_vmware.rw_handles [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520e6944-814f-8bf3-43ab-698104cdad40/disk-0.vmdk. {{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 960.387380] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Uploaded image a94454fe-9ef4-4eea-bd03-a3ef0c5cd96e to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 960.390275] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 960.390589] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-784b8b4f-9a5a-4a21-934c-29008c51f738 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.402030] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 960.402030] env[65503]: value = "task-4450349" [ 960.402030] env[65503]: _type = "Task" [ 960.402030] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.412170] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450349, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.431308] env[65503]: DEBUG nova.compute.manager [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Received event network-changed-aa79a652-2e09-4670-8d1d-195369c94b2b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 960.431651] env[65503]: DEBUG nova.compute.manager [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Refreshing instance network info cache due to event network-changed-aa79a652-2e09-4670-8d1d-195369c94b2b. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 960.431908] env[65503]: DEBUG oslo_concurrency.lockutils [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] Acquiring lock "refresh_cache-81e27e9b-7d7e-4b04-8257-268660fd9ec3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.524377] env[65503]: WARNING openstack [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 960.524833] env[65503]: WARNING openstack [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 960.572474] env[65503]: DEBUG nova.network.neutron [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 960.681025] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "refresh_cache-81e27e9b-7d7e-4b04-8257-268660fd9ec3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.681208] env[65503]: DEBUG nova.compute.manager [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Instance network_info: |[{"id": "aa79a652-2e09-4670-8d1d-195369c94b2b", "address": "fa:16:3e:69:d1:da", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa79a652-2e", "ovs_interfaceid": "aa79a652-2e09-4670-8d1d-195369c94b2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 960.684582] env[65503]: DEBUG oslo_concurrency.lockutils [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] Acquired lock "refresh_cache-81e27e9b-7d7e-4b04-8257-268660fd9ec3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.684787] env[65503]: DEBUG nova.network.neutron [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Refreshing network info cache for port aa79a652-2e09-4670-8d1d-195369c94b2b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 960.691191] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:d1:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa79a652-2e09-4670-8d1d-195369c94b2b', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 960.705858] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 960.706608] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 960.707601] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5017c717-da7a-4c5f-9952-79641855a63f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.736037] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 960.736037] env[65503]: value = "task-4450350" [ 960.736037] env[65503]: _type = "Task" [ 960.736037] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.747054] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450350, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.790064] env[65503]: INFO nova.compute.manager [None req-70d6d351-1290-475c-9422-54562a9f28a6 tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Get console output [ 960.790456] env[65503]: WARNING nova.virt.vmwareapi.driver [None req-70d6d351-1290-475c-9422-54562a9f28a6 tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] The console log is missing. 
Check your VSPC configuration [ 960.863063] env[65503]: WARNING openstack [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 960.863580] env[65503]: WARNING openstack [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 961.753438] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "909b3535-9410-4820-a34d-6c0e9627f506" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.753438] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "909b3535-9410-4820-a34d-6c0e9627f506" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.753438] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "909b3535-9410-4820-a34d-6c0e9627f506-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.753438] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "909b3535-9410-4820-a34d-6c0e9627f506-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.753438] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "909b3535-9410-4820-a34d-6c0e9627f506-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.754985] env[65503]: WARNING neutronclient.v2_0.client [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, 
please use that as this will be removed in a future release. [ 961.755592] env[65503]: WARNING openstack [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 961.755960] env[65503]: WARNING openstack [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 961.765737] env[65503]: INFO nova.compute.manager [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Terminating instance [ 961.789226] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450349, 'name': Destroy_Task, 'duration_secs': 0.362527} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.789428] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450350, 'name': CreateVM_Task, 'duration_secs': 0.384156} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.789638] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450348, 'name': CloneVM_Task, 'duration_secs': 1.678974} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.790503] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Destroyed the VM [ 961.790744] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 961.790936] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.791173] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Created linked-clone VM from snapshot [ 961.791512] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-af0b333c-aea0-4232-ac44-8dbb79034072 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.795962] env[65503]: WARNING neutronclient.v2_0.client [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 961.795962] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.795962] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.795962] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 961.795962] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9ce9af-17be-49b7-b989-83c388ad9b71 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.800309] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d76f0b5-1fea-40bb-b931-4c309d884b1a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.810807] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Uploading image f47b707b-e103-4bed-9885-087a7eda64cd {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 961.819813] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 961.819813] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e245a7-20b1-8bae-b408-7513f97b6f19" [ 961.819813] env[65503]: _type = "Task" [ 961.819813] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.822018] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 961.822018] env[65503]: value = "task-4450351" [ 961.822018] env[65503]: _type = "Task" [ 961.822018] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.832911] env[65503]: WARNING neutronclient.v2_0.client [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 961.833725] env[65503]: WARNING openstack [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 961.834218] env[65503]: WARNING openstack [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 961.846194] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ed14fa-288f-47db-bce8-8a854a18a406 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.858841] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e245a7-20b1-8bae-b408-7513f97b6f19, 'name': SearchDatastore_Task, 'duration_secs': 0.012856} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.859146] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450351, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.861158] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.861158] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 961.861158] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.861158] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.861158] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.861500] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-033c78a0-e4cf-4888-87d5-6f565a5ee339 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.867946] env[65503]: DEBUG oslo_vmware.rw_handles [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 961.867946] env[65503]: value = "vm-870418" [ 961.867946] env[65503]: _type = "VirtualMachine" [ 961.867946] env[65503]: }. 
{{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 961.869684] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb92ff63-e8e1-40a6-add5-9929c098fdc6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.874152] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b3d59afe-9ac5-49c1-83d5-3f5c9d6fffc0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.879240] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.879463] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 961.880950] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dd7c9cd-bb7f-4b01-8be7-9f72bf066fdb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.916336] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51fedf0-c825-4345-ad54-02de66fe151b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.919145] env[65503]: DEBUG oslo_vmware.rw_handles [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lease: (returnval){ [ 961.919145] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ce4c8-488a-642c-e9e9-3316d55f7546" [ 961.919145] env[65503]: _type = "HttpNfcLease" [ 961.919145] env[65503]: } obtained for exporting VM: (result){ [ 961.919145] env[65503]: value = "vm-870418" [ 961.919145] env[65503]: _type = "VirtualMachine" [ 961.919145] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 961.919434] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the lease: (returnval){ [ 961.919434] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ce4c8-488a-642c-e9e9-3316d55f7546" [ 961.919434] env[65503]: _type = "HttpNfcLease" [ 961.919434] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 961.923656] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 961.923656] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d95629-a137-a31f-1646-576f971e2706" [ 961.923656] env[65503]: _type = "Task" [ 961.923656] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.932178] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5371e063-ec59-42b4-93e8-0f1fe10b92d6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.943173] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 961.943173] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ce4c8-488a-642c-e9e9-3316d55f7546" [ 961.943173] env[65503]: _type = "HttpNfcLease" [ 961.943173] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 961.944078] env[65503]: DEBUG oslo_vmware.rw_handles [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 961.944078] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ce4c8-488a-642c-e9e9-3316d55f7546" [ 961.944078] env[65503]: _type = "HttpNfcLease" [ 961.944078] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 961.944866] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fb787a-0154-47e7-a83f-441bb715da69 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.961468] env[65503]: DEBUG nova.compute.provider_tree [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.963358] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d95629-a137-a31f-1646-576f971e2706, 'name': SearchDatastore_Task, 'duration_secs': 0.010924} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.964762] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-084940eb-7a34-408a-97ad-3e0e9d1711ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.971982] env[65503]: DEBUG oslo_vmware.rw_handles [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b1c36a-635c-0458-58d2-31529f050ec1/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 961.971982] env[65503]: DEBUG oslo_vmware.rw_handles [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b1c36a-635c-0458-58d2-31529f050ec1/disk-0.vmdk for reading. 
{{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 962.036116] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 962.036116] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bc12b2-ac59-347c-2941-0cbcad13104c" [ 962.036116] env[65503]: _type = "Task" [ 962.036116] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.049729] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bc12b2-ac59-347c-2941-0cbcad13104c, 'name': SearchDatastore_Task, 'duration_secs': 0.017977} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.050053] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.050518] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 81e27e9b-7d7e-4b04-8257-268660fd9ec3/81e27e9b-7d7e-4b04-8257-268660fd9ec3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 962.050689] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0009c50-778e-48df-bee5-36f8d2a5444e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.059726] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 962.059726] env[65503]: value = "task-4450353" [ 962.059726] env[65503]: _type = "Task" [ 962.059726] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.071022] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450353, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.101558] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-05541c5f-948f-4263-a595-8c9d218389f4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.113020] env[65503]: DEBUG nova.network.neutron [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Updating instance_info_cache with network_info: [{"id": "53fe187f-f12f-40ad-94fa-75bb9f0182ed", "address": "fa:16:3e:8f:43:ac", "network": {"id": "d4cdc216-2fcb-4281-8227-0887797358f6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-21705326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd6e7f469b7d408fae0621171c096f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53fe187f-f1", "ovs_interfaceid": "53fe187f-f12f-40ad-94fa-75bb9f0182ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 962.321206] env[65503]: WARNING openstack [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 962.321206] env[65503]: WARNING openstack [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 962.321206] env[65503]: DEBUG nova.compute.manager [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 962.321206] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.321206] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446b8f7f-e510-4966-82ff-31c2bdab8869 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.321206] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.321206] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80ce0fda-d7bf-4312-b218-da334d9df691 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.321206] env[65503]: DEBUG oslo_vmware.api [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 962.321206] env[65503]: value = "task-4450354" [ 962.321206] env[65503]: _type = "Task" [ 962.321206] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.332944] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquiring lock "31ee1061-6199-4341-86ab-9ae606b269fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.333307] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Lock "31ee1061-6199-4341-86ab-9ae606b269fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.333652] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquiring lock "31ee1061-6199-4341-86ab-9ae606b269fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.336630] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Lock "31ee1061-6199-4341-86ab-9ae606b269fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.336630] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Lock "31ee1061-6199-4341-86ab-9ae606b269fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.345759] env[65503]: DEBUG oslo_vmware.api [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450354, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.345759] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450351, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.350041] env[65503]: INFO nova.compute.manager [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Terminating instance [ 962.370131] env[65503]: WARNING neutronclient.v2_0.client [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 962.371063] env[65503]: WARNING openstack [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 962.371486] env[65503]: WARNING openstack [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 962.464971] env[65503]: DEBUG nova.scheduler.client.report [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.575194] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450353, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.615185] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Releasing lock "refresh_cache-8962b1b8-4875-4a1a-b231-36385755a976" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.618967] env[65503]: DEBUG nova.compute.manager [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Instance network_info: |[{"id": "53fe187f-f12f-40ad-94fa-75bb9f0182ed", "address": "fa:16:3e:8f:43:ac", "network": {"id": "d4cdc216-2fcb-4281-8227-0887797358f6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-21705326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd6e7f469b7d408fae0621171c096f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53fe187f-f1", "ovs_interfaceid": "53fe187f-f12f-40ad-94fa-75bb9f0182ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 962.619675] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:43:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53fe187f-f12f-40ad-94fa-75bb9f0182ed', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 962.627561] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 962.629939] env[65503]: DEBUG nova.network.neutron [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Updated VIF entry in instance network info cache for port aa79a652-2e09-4670-8d1d-195369c94b2b. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 962.630456] env[65503]: DEBUG nova.network.neutron [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Updating instance_info_cache with network_info: [{"id": "aa79a652-2e09-4670-8d1d-195369c94b2b", "address": "fa:16:3e:69:d1:da", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa79a652-2e", "ovs_interfaceid": "aa79a652-2e09-4670-8d1d-195369c94b2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 962.631720] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 962.632239] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d86e307-564e-46ba-8ecb-736599aa84f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.656343] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 962.656343] env[65503]: value = "task-4450355" [ 962.656343] env[65503]: _type = "Task" [ 962.656343] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.666353] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450355, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.710609] env[65503]: DEBUG oslo_concurrency.lockutils [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "b6cda94b-2894-4cf0-8522-6593df9723bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.711519] env[65503]: DEBUG oslo_concurrency.lockutils [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "b6cda94b-2894-4cf0-8522-6593df9723bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.712044] env[65503]: DEBUG oslo_concurrency.lockutils [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "b6cda94b-2894-4cf0-8522-6593df9723bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.712265] env[65503]: DEBUG oslo_concurrency.lockutils [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "b6cda94b-2894-4cf0-8522-6593df9723bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.712667] env[65503]: DEBUG oslo_concurrency.lockutils [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "b6cda94b-2894-4cf0-8522-6593df9723bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.715775] env[65503]: INFO nova.compute.manager [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Terminating instance [ 962.830236] env[65503]: DEBUG oslo_vmware.api [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450354, 'name': PowerOffVM_Task, 'duration_secs': 0.387858} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.835960] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 962.835960] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 962.835960] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db644908-abfb-423f-9342-0945e8be5c14 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.847183] env[65503]: DEBUG oslo_vmware.api [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450351, 'name': RemoveSnapshot_Task, 'duration_secs': 0.643262} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.847550] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 962.848292] env[65503]: INFO nova.compute.manager [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Took 15.39 seconds to snapshot the instance on the hypervisor. [ 962.852149] env[65503]: DEBUG nova.compute.manager [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 962.852375] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.853815] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2d8944-7668-480e-911a-c510f4a827be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.863226] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.863753] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5780b323-fc98-4509-b72f-cf2a305e40b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.872744] env[65503]: DEBUG oslo_vmware.api [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for the task: (returnval){ [ 962.872744] env[65503]: value = "task-4450357" [ 962.872744] env[65503]: _type = "Task" [ 962.872744] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.884052] env[65503]: DEBUG oslo_vmware.api [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450357, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.911904] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 962.912143] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 962.914158] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleting the datastore file [datastore2] 909b3535-9410-4820-a34d-6c0e9627f506 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.914158] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae1dbb16-5a6a-473c-b9b8-3c2e02835278 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.922372] env[65503]: DEBUG oslo_vmware.api [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 962.922372] env[65503]: value = "task-4450358" [ 962.922372] env[65503]: _type = "Task" [ 962.922372] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.935154] env[65503]: DEBUG oslo_vmware.api [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450358, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.979860] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.360s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.980285] env[65503]: WARNING neutronclient.v2_0.client [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 962.983072] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.498s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.984697] env[65503]: INFO nova.compute.claims [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 963.080643] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450353, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581809} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.080854] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 81e27e9b-7d7e-4b04-8257-268660fd9ec3/81e27e9b-7d7e-4b04-8257-268660fd9ec3.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 963.081261] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 963.081606] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62285640-5287-483f-bdc6-b387132ba414 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.091110] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 963.091110] env[65503]: value = "task-4450359" [ 963.091110] env[65503]: _type = "Task" [ 963.091110] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.103678] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450359, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.150815] env[65503]: DEBUG oslo_concurrency.lockutils [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] Releasing lock "refresh_cache-81e27e9b-7d7e-4b04-8257-268660fd9ec3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.150815] env[65503]: DEBUG nova.compute.manager [req-674e8045-17f2-40b5-bb05-8a5d7bf256df req-b5a692db-3bdd-451a-9919-458d993c28d2 service nova] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Received event network-vif-deleted-19156670-d2c9-45a9-b9a1-2ab187cf5f4f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 963.167526] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450355, 'name': CreateVM_Task, 'duration_secs': 0.478036} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.167526] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 963.167906] env[65503]: WARNING neutronclient.v2_0.client [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 963.168353] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.168566] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.168928] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 963.169307] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9df9109-ca1a-4c9c-b2a7-cd15ff9cb727 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.176123] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 963.176123] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c912cb-cc72-f9ee-edd1-f6303d0efe14" [ 963.176123] 
env[65503]: _type = "Task" [ 963.176123] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.185554] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c912cb-cc72-f9ee-edd1-f6303d0efe14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.220704] env[65503]: DEBUG nova.compute.manager [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 963.220952] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 963.221947] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea013415-e796-49a6-b065-5db0e1b0f7e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.231868] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 963.232198] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1eb9c3fe-7930-41bd-81a4-4a5446047687 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.241874] env[65503]: DEBUG oslo_vmware.api [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 963.241874] env[65503]: value = "task-4450360" [ 963.241874] env[65503]: _type = "Task" [ 963.241874] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.253504] env[65503]: DEBUG oslo_vmware.api [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4450360, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.382910] env[65503]: DEBUG nova.compute.manager [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Received event network-changed-53fe187f-f12f-40ad-94fa-75bb9f0182ed {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 963.383735] env[65503]: DEBUG nova.compute.manager [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Refreshing instance network info cache due to event network-changed-53fe187f-f12f-40ad-94fa-75bb9f0182ed. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 963.384209] env[65503]: DEBUG oslo_concurrency.lockutils [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] Acquiring lock "refresh_cache-8962b1b8-4875-4a1a-b231-36385755a976" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.384647] env[65503]: DEBUG oslo_concurrency.lockutils [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] Acquired lock "refresh_cache-8962b1b8-4875-4a1a-b231-36385755a976" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.384647] env[65503]: DEBUG nova.network.neutron [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Refreshing network info cache for port 53fe187f-f12f-40ad-94fa-75bb9f0182ed {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 963.391652] env[65503]: DEBUG oslo_vmware.api [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450357, 'name': PowerOffVM_Task, 'duration_secs': 0.261171} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.394468] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.394468] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 963.394468] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c015b73c-ab19-476c-b5d4-f73022c7971c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.422296] env[65503]: DEBUG nova.compute.manager [None req-e970aa51-737b-4ea0-a2de-f1986c6f30cb tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Found 2 images (rotation: 2) {{(pid=65503) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5057}} [ 963.436872] env[65503]: DEBUG oslo_vmware.api [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450358, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184331} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.437372] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.437616] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.437838] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.438035] env[65503]: INFO nova.compute.manager [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 963.438311] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 963.438523] env[65503]: DEBUG nova.compute.manager [-] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 963.438621] env[65503]: DEBUG nova.network.neutron [-] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 963.438875] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 963.439443] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 963.440328] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 963.480635] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 963.480946] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 963.481078] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Deleting the datastore file [datastore2] 31ee1061-6199-4341-86ab-9ae606b269fe {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.481396] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca148a81-4570-4b75-90c8-2d4451967403 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.496247] env[65503]: DEBUG oslo_vmware.api [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for the task: (returnval){ [ 963.496247] env[65503]: value = "task-4450362" [ 963.496247] env[65503]: _type = "Task" [ 963.496247] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.497598] env[65503]: INFO nova.network.neutron [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updating port 73e98445-c951-4dc2-82e3-537e2196f82a with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 963.510938] env[65503]: DEBUG oslo_vmware.api [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450362, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.608629] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450359, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131129} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.609079] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 963.609834] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2810eca1-c144-4bf4-9059-b3242ea728d6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.646477] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 81e27e9b-7d7e-4b04-8257-268660fd9ec3/81e27e9b-7d7e-4b04-8257-268660fd9ec3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 963.647433] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b427ee7-dfd7-40bf-8ca5-114b320f2d1b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.671095] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 963.671095] env[65503]: value = "task-4450363" [ 963.671095] env[65503]: _type = "Task" [ 963.671095] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.688011] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c912cb-cc72-f9ee-edd1-f6303d0efe14, 'name': SearchDatastore_Task, 'duration_secs': 0.013044} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.692365] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.692365] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 963.692518] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.693373] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.693373] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 963.693373] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450363, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.695880] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-331f7fef-96ae-4703-b894-5ffee522339f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.722899] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 963.723145] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 963.724102] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5882bf23-0b0d-4dcb-9b7d-c941eb5f589e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.733031] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 963.733031] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d64ede-ca59-c793-ec3e-57df397df7dd" [ 963.733031] env[65503]: _type = "Task" [ 963.733031] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.736833] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 963.743037] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d64ede-ca59-c793-ec3e-57df397df7dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.751821] env[65503]: DEBUG oslo_vmware.api [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4450360, 'name': PowerOffVM_Task, 'duration_secs': 0.49864} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.753263] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.753263] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 963.753263] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a59d44a4-4075-4a95-a466-58c9ecb0938f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.827209] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 963.827754] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 963.827754] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Deleting the datastore file [datastore1] b6cda94b-2894-4cf0-8522-6593df9723bd {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.827919] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19209463-16ba-4c75-9d07-00f5cf0df927 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.838950] env[65503]: DEBUG oslo_vmware.api [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for the task: (returnval){ [ 963.838950] env[65503]: value = "task-4450365" [ 963.838950] env[65503]: _type = "Task" [ 963.838950] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.849375] env[65503]: DEBUG oslo_vmware.api [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4450365, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.894201] env[65503]: WARNING neutronclient.v2_0.client [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 963.895124] env[65503]: WARNING openstack [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 963.895543] env[65503]: WARNING openstack [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 964.022229] env[65503]: DEBUG oslo_vmware.api [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Task: {'id': task-4450362, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.350736} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.022874] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 964.022874] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 964.023048] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 964.023313] env[65503]: INFO nova.compute.manager [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Took 1.17 seconds to destroy the instance on the hypervisor. [ 964.023835] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 964.023835] env[65503]: DEBUG nova.compute.manager [-] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 964.023835] env[65503]: DEBUG nova.network.neutron [-] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 964.024217] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 964.024999] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 964.025227] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 964.185520] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450363, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.245506] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d64ede-ca59-c793-ec3e-57df397df7dd, 'name': SearchDatastore_Task, 'duration_secs': 0.025799} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.249610] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54eea0e1-d90f-4e4f-91a6-4fc58b31c698 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.256553] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 964.256553] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f66465-193a-ca75-a443-0c732b7e1dd2" [ 964.256553] env[65503]: _type = "Task" [ 964.256553] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.269195] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f66465-193a-ca75-a443-0c732b7e1dd2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.292759] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12eb8e83-f00b-4e57-89d4-249859ee7930 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.302127] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9480b427-0069-4d95-b0d3-b00c7c71aecf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.345197] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9299159e-4350-4706-b997-14263e8a6a36 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.358077] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8f6e0d-abcf-4826-b503-bdafc1fe75f7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.362402] env[65503]: DEBUG oslo_vmware.api [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Task: {'id': task-4450365, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.405587} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.362707] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 964.363021] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 964.363087] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 964.363219] env[65503]: INFO nova.compute.manager [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Took 1.14 seconds to destroy the instance on the hypervisor. [ 964.363537] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 964.364226] env[65503]: DEBUG nova.compute.manager [-] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 964.364291] env[65503]: DEBUG nova.network.neutron [-] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 964.364562] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 964.365122] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 964.365382] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 964.381930] env[65503]: DEBUG nova.compute.provider_tree [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.681813] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450363, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.768434] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f66465-193a-ca75-a443-0c732b7e1dd2, 'name': SearchDatastore_Task, 'duration_secs': 0.01196} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.768718] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.768985] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 8962b1b8-4875-4a1a-b231-36385755a976/8962b1b8-4875-4a1a-b231-36385755a976.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 964.769396] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84daf753-d8fa-4ece-87ef-dc0e5af5dfc9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.778436] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 964.778436] env[65503]: value = "task-4450366" [ 964.778436] env[65503]: _type = "Task" [ 964.778436] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.788183] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450366, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.884686] env[65503]: DEBUG nova.scheduler.client.report [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 965.191704] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450363, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.268182] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 965.293633] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450366, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.336319] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 965.389699] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.390320] env[65503]: DEBUG nova.compute.manager [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 965.397227] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.811s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.398946] env[65503]: INFO nova.compute.claims [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 965.447956] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.448219] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.448359] env[65503]: DEBUG nova.network.neutron [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Building network info cache for instance {{(pid=65503) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2096}} [ 965.600302] env[65503]: DEBUG nova.network.neutron [-] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 965.686239] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450363, 'name': ReconfigVM_Task, 'duration_secs': 1.593823} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.688037] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 81e27e9b-7d7e-4b04-8257-268660fd9ec3/81e27e9b-7d7e-4b04-8257-268660fd9ec3.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 965.688037] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f482e89f-32e8-4146-9e30-d6bb28a82529 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.689841] env[65503]: DEBUG oslo_concurrency.lockutils [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.690087] env[65503]: DEBUG oslo_concurrency.lockutils [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.690392] env[65503]: DEBUG nova.objects.instance [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'flavor' on Instance uuid 2a1587cd-8b47-439f-948c-d58a5dc8220e {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.698338] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 965.698338] env[65503]: value = "task-4450367" [ 965.698338] env[65503]: _type = "Task" [ 965.698338] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.709320] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450367, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.791074] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450366, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.898361} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.791440] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 8962b1b8-4875-4a1a-b231-36385755a976/8962b1b8-4875-4a1a-b231-36385755a976.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.791700] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.791995] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a45ebb0-6057-4b31-8604-506fe13cb33b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.800978] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 965.800978] env[65503]: value = "task-4450368" [ 965.800978] env[65503]: _type = "Task" [ 965.800978] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.817274] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450368, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.907147] env[65503]: DEBUG nova.compute.utils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 965.914226] env[65503]: DEBUG nova.compute.manager [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 965.914813] env[65503]: DEBUG nova.network.neutron [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 965.914983] env[65503]: WARNING neutronclient.v2_0.client [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 965.915963] env[65503]: WARNING neutronclient.v2_0.client [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 965.916274] env[65503]: WARNING openstack [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 965.916723] env[65503]: WARNING openstack [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 965.952438] env[65503]: WARNING neutronclient.v2_0.client [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 965.953264] env[65503]: WARNING openstack [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 965.953736] env[65503]: WARNING openstack [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 966.104902] env[65503]: INFO nova.compute.manager [-] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Took 2.67 seconds to deallocate network for instance. 
[ 966.194288] env[65503]: WARNING neutronclient.v2_0.client [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 966.195061] env[65503]: WARNING openstack [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 966.195625] env[65503]: WARNING openstack [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 966.204185] env[65503]: DEBUG nova.objects.instance [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'pci_requests' on Instance uuid 2a1587cd-8b47-439f-948c-d58a5dc8220e {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.219424] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450367, 'name': Rename_Task, 'duration_secs': 0.156532} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.220318] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 966.220629] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c403cf5b-ccb8-4143-a1b2-e96e0069328a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.232719] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 966.232719] env[65503]: value = "task-4450369" [ 966.232719] env[65503]: _type = "Task" [ 966.232719] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.248023] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450369, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.312783] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450368, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094531} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.313132] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 966.314310] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c812d2cb-a1d6-4215-897e-3e353056dd41 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.347194] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 8962b1b8-4875-4a1a-b231-36385755a976/8962b1b8-4875-4a1a-b231-36385755a976.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.351503] env[65503]: WARNING openstack [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 966.351503] env[65503]: WARNING openstack [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 966.357174] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dfbc358-7378-4f95-a50b-ec10a47e401e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.379224] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 966.379224] env[65503]: value = "task-4450370" [ 966.379224] env[65503]: _type = "Task" [ 966.379224] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.391572] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450370, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.416572] env[65503]: DEBUG nova.compute.manager [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 966.430603] env[65503]: DEBUG nova.policy [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b05193bb9ea94f6383816667128913a3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2add6a04793446c980e8310b24790764', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 966.585479] env[65503]: DEBUG nova.network.neutron [-] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 966.612798] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.707153] env[65503]: DEBUG nova.objects.base [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Object Instance<2a1587cd-8b47-439f-948c-d58a5dc8220e> lazy-loaded attributes: flavor,pci_requests {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 966.707804] env[65503]: DEBUG nova.network.neutron [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 966.708070] env[65503]: WARNING neutronclient.v2_0.client [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
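Note (illustrative, not from this log): the CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task entries above all follow the same oslo.vmware pattern: issue a SOAP call that returns a vCenter task reference, then poll that task until it finishes. A minimal sketch of the pattern, with placeholder vCenter host, credentials and VM reference:

from oslo_vmware import api

# Placeholder connection details; a real deployment reads these from nova.conf.
session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # a VirtualMachine managed-object reference obtained elsewhere

# invoke_api() sends the request (the "Invoking <method> with opID=..." lines)
# and returns a task; wait_for_task() polls it (the "Task: {...} progress is N%"
# lines) until it succeeds, raising if the task errors out.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)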
[ 966.708571] env[65503]: WARNING neutronclient.v2_0.client [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 966.709693] env[65503]: WARNING openstack [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 966.710373] env[65503]: WARNING openstack [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 966.744615] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450369, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.746346] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24588aed-903f-4752-a5f8-02c92121d3b5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.754451] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9084f135-3ec8-4649-8e05-628b2207d6b8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.760404] env[65503]: WARNING neutronclient.v2_0.client [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
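Note (illustrative, not from this log): the repeated neutronclient deprecation warnings recommend openstacksdk instead. A minimal sketch of an equivalent port lookup with openstacksdk, assuming a clouds.yaml entry named 'devstack-admin':

import openstack

# Connect using a clouds.yaml profile (the name is a placeholder).
conn = openstack.connect(cloud='devstack-admin')

# List the Neutron ports bound to one of the instances seen above.
for port in conn.network.ports(device_id='8962b1b8-4875-4a1a-b231-36385755a976'):
    print(port.id, port.status, port.fixed_ips)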
[ 966.760404] env[65503]: WARNING openstack [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 966.760404] env[65503]: WARNING openstack [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 966.802304] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44979837-2a52-489a-927c-73c95c4f1e16 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.814146] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14041d5-4831-4015-9b51-3e0f27ab0e3b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.829931] env[65503]: DEBUG nova.compute.provider_tree [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.853570] env[65503]: DEBUG nova.network.neutron [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Successfully created port: 6b06aa59-3060-4bf3-8f9f-e8942a00fe50 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 966.869371] env[65503]: WARNING openstack [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 966.869776] env[65503]: WARNING openstack [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 966.890263] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450370, 'name': ReconfigVM_Task, 'duration_secs': 0.3152} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.895558] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 8962b1b8-4875-4a1a-b231-36385755a976/8962b1b8-4875-4a1a-b231-36385755a976.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.896653] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-849c4509-c696-496b-b61d-201090b5a56f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.905313] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 966.905313] env[65503]: value = "task-4450371" [ 966.905313] env[65503]: _type = "Task" [ 966.905313] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.918171] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450371, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.930033] env[65503]: DEBUG oslo_concurrency.lockutils [None req-465070f5-b732-41ac-8f24-db7317f257a9 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.240s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.019971] env[65503]: DEBUG nova.network.neutron [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Updated VIF entry in instance network info cache for port 53fe187f-f12f-40ad-94fa-75bb9f0182ed. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 967.020343] env[65503]: DEBUG nova.network.neutron [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Updating instance_info_cache with network_info: [{"id": "53fe187f-f12f-40ad-94fa-75bb9f0182ed", "address": "fa:16:3e:8f:43:ac", "network": {"id": "d4cdc216-2fcb-4281-8227-0887797358f6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-21705326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd6e7f469b7d408fae0621171c096f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53fe187f-f1", "ovs_interfaceid": "53fe187f-f12f-40ad-94fa-75bb9f0182ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 967.049650] env[65503]: WARNING neutronclient.v2_0.client [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 967.050485] env[65503]: WARNING openstack [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 967.050908] env[65503]: WARNING openstack [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 967.091963] env[65503]: INFO nova.compute.manager [-] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Took 3.07 seconds to deallocate network for instance. 
[ 967.151673] env[65503]: DEBUG nova.network.neutron [-] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 967.206731] env[65503]: DEBUG nova.network.neutron [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updating instance_info_cache with network_info: [{"id": "73e98445-c951-4dc2-82e3-537e2196f82a", "address": "fa:16:3e:d2:11:71", "network": {"id": "1ed26744-3e1a-4978-ae1a-76469b646e4a", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472231669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5a588e741704449878e7a03d7892d11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73e98445-c9", "ovs_interfaceid": "73e98445-c951-4dc2-82e3-537e2196f82a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 967.248037] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450369, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.333867] env[65503]: DEBUG nova.scheduler.client.report [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 967.419635] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450371, 'name': Rename_Task, 'duration_secs': 0.176223} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.419635] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.419972] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7233b08c-aca7-4354-9401-88e4d2bb9a46 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.427756] env[65503]: DEBUG nova.compute.manager [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 967.431021] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 967.431021] env[65503]: value = "task-4450372" [ 967.431021] env[65503]: _type = "Task" [ 967.431021] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.443311] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450372, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.458201] env[65503]: DEBUG nova.virt.hardware [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 967.458354] env[65503]: DEBUG nova.virt.hardware [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 967.458354] env[65503]: DEBUG nova.virt.hardware [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 967.458537] env[65503]: DEBUG nova.virt.hardware [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 967.458676] env[65503]: DEBUG nova.virt.hardware [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 967.458817] env[65503]: DEBUG nova.virt.hardware [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 967.459035] env[65503]: DEBUG nova.virt.hardware [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 967.459207] env[65503]: DEBUG nova.virt.hardware [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 967.459371] env[65503]: DEBUG 
nova.virt.hardware [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 967.459575] env[65503]: DEBUG nova.virt.hardware [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 967.459918] env[65503]: DEBUG nova.virt.hardware [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 967.460960] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e015517-5b1b-4bf2-9ddb-b015b6113fac {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.473941] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b864e4-56c6-45c1-85c5-640d28677853 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.524326] env[65503]: DEBUG oslo_concurrency.lockutils [req-ad54c4e9-790c-4152-a231-ee4df864c103 req-6faedd2a-8a17-49c0-85f9-30ac51cfdc28 service nova] Releasing lock "refresh_cache-8962b1b8-4875-4a1a-b231-36385755a976" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.598520] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.655156] env[65503]: INFO nova.compute.manager [-] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Took 3.29 seconds to deallocate network for instance. 
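Note (illustrative, not from this log): the inventory reported a few lines above for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 implies the following schedulable capacity, using placement's usual formula capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation:

# Worked out from the inventory data in the report lines above.
vcpu_capacity = (48 - 0) * 4.0        # 192 VCPUs, at most 16 per instance
ram_capacity  = (196590 - 512) * 1.0  # 196078 MB, at most 65530 MB per instance
disk_capacity = (200 - 0) * 1.0       # 200 GB, at most 96 GB per instance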
[ 967.709608] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.735845] env[65503]: DEBUG nova.virt.hardware [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='8f18cccd0b0f4ec7f9e8720062f55d92',container_format='bare',created_at=2025-11-14T15:52:58Z,direct_url=,disk_format='vmdk',id=b21b686c-8154-4b2b-9ac4-03e9bcfc50c2,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1562299448-shelved',owner='f5a588e741704449878e7a03d7892d11',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2025-11-14T15:53:14Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 967.736139] env[65503]: DEBUG nova.virt.hardware [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 967.736310] env[65503]: DEBUG nova.virt.hardware [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 967.736494] env[65503]: DEBUG nova.virt.hardware [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 967.736638] env[65503]: DEBUG nova.virt.hardware [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 967.736778] env[65503]: DEBUG nova.virt.hardware [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 967.736984] env[65503]: DEBUG nova.virt.hardware [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 967.737159] env[65503]: DEBUG nova.virt.hardware [None 
req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 967.737320] env[65503]: DEBUG nova.virt.hardware [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 967.737555] env[65503]: DEBUG nova.virt.hardware [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 967.737751] env[65503]: DEBUG nova.virt.hardware [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 967.738832] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17501e5-5e5a-46c3-acd8-523ad355b3d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.754762] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db9573c-06ba-4d49-959a-2b5eb459a150 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.760642] env[65503]: DEBUG oslo_vmware.api [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450369, 'name': PowerOnVM_Task, 'duration_secs': 1.16265} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.761960] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 967.761960] env[65503]: INFO nova.compute.manager [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Took 10.90 seconds to spawn the instance on the hypervisor. 
[ 967.761960] env[65503]: DEBUG nova.compute.manager [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 967.762651] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575f2053-5791-4606-b1e3-4e9dd8ba26f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.777895] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:11:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2180b40f-2bb0-47da-ba80-c2fbe7f98af0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73e98445-c951-4dc2-82e3-537e2196f82a', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 967.788116] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 967.789456] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 967.789804] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cc387d1-cf72-47da-ab4b-13e16ab32e08 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.817614] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 967.817614] env[65503]: value = "task-4450373" [ 967.817614] env[65503]: _type = "Task" [ 967.817614] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.829659] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450373, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.839824] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.840760] env[65503]: DEBUG nova.compute.manager [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 967.843421] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 11.009s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.944034] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450372, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.000573] env[65503]: DEBUG nova.compute.manager [req-1a495ac9-34ca-4305-929d-3757dd17381a req-0030424b-c9d1-4c8e-82bb-fcf062a7d1d1 service nova] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Received event network-vif-deleted-c1eaff02-bbf6-47d2-9655-443ca8021c5a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 968.022440] env[65503]: DEBUG nova.compute.manager [req-e22efbd3-de21-4e1f-ad6c-f3530546e884 req-fd8bfde3-7021-4e7e-b696-60a85cbbdf7d service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Received event network-vif-plugged-73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 968.022803] env[65503]: DEBUG oslo_concurrency.lockutils [req-e22efbd3-de21-4e1f-ad6c-f3530546e884 req-fd8bfde3-7021-4e7e-b696-60a85cbbdf7d service nova] Acquiring lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.023053] env[65503]: DEBUG oslo_concurrency.lockutils [req-e22efbd3-de21-4e1f-ad6c-f3530546e884 req-fd8bfde3-7021-4e7e-b696-60a85cbbdf7d service nova] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.023301] env[65503]: DEBUG oslo_concurrency.lockutils [req-e22efbd3-de21-4e1f-ad6c-f3530546e884 req-fd8bfde3-7021-4e7e-b696-60a85cbbdf7d service nova] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.023556] env[65503]: DEBUG nova.compute.manager [req-e22efbd3-de21-4e1f-ad6c-f3530546e884 req-fd8bfde3-7021-4e7e-b696-60a85cbbdf7d service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] No waiting events found dispatching network-vif-plugged-73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 968.023800] env[65503]: WARNING nova.compute.manager [req-e22efbd3-de21-4e1f-ad6c-f3530546e884 req-fd8bfde3-7021-4e7e-b696-60a85cbbdf7d service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Received unexpected event network-vif-plugged-73e98445-c951-4dc2-82e3-537e2196f82a for instance with vm_state shelved_offloaded and task_state spawning. 
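Note (illustrative, not from this log): the "Acquiring lock ... / Lock ... acquired ... :: waited / Lock ... released ... :: held" messages above come from oslo.concurrency's lockutils helpers; the decorator and context-manager forms below appear to correspond to the two variants of that logging. The lock names are examples only.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim():
    # Runs with the named lock held; produces the
    # "acquired ... waited" / "released ... held" debug lines.
    pass

def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # Produces the "Acquiring lock" / "Acquired lock" / "Releasing lock" lines.
        pass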
[ 968.164336] env[65503]: DEBUG oslo_concurrency.lockutils [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.332325] env[65503]: INFO nova.compute.manager [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Took 34.12 seconds to build instance. [ 968.342157] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450373, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.347050] env[65503]: DEBUG nova.compute.utils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 968.357400] env[65503]: DEBUG nova.compute.manager [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 968.357628] env[65503]: DEBUG nova.network.neutron [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 968.358578] env[65503]: WARNING neutronclient.v2_0.client [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 968.359029] env[65503]: WARNING neutronclient.v2_0.client [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 968.359661] env[65503]: WARNING openstack [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 968.360030] env[65503]: WARNING openstack [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 968.445332] env[65503]: DEBUG oslo_vmware.api [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450372, 'name': PowerOnVM_Task, 'duration_secs': 0.705803} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.445332] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.445332] env[65503]: INFO nova.compute.manager [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Took 9.68 seconds to spawn the instance on the hypervisor. 
[ 968.446028] env[65503]: DEBUG nova.compute.manager [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 968.446972] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a3639c-1e50-471f-8b20-fda95f2a6f7e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.483818] env[65503]: DEBUG nova.policy [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa5203efa0354baca5354d76cf3365c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf54348a3d0948cfa816cc3746e86806', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 968.535214] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquiring lock "a043a8e2-8661-4d80-939d-8e7b02b0459f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.535214] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Lock "a043a8e2-8661-4d80-939d-8e7b02b0459f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.639781] env[65503]: DEBUG nova.network.neutron [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Successfully updated port: 6b06aa59-3060-4bf3-8f9f-e8942a00fe50 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 968.831505] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450373, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.844147] env[65503]: DEBUG oslo_concurrency.lockutils [None req-175a7b8a-e64f-48a9-bc58-8dc51d825193 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.639s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.850131] env[65503]: DEBUG nova.compute.manager [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 968.914638] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance b6cda94b-2894-4cf0-8522-6593df9723bd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 968.914813] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance a57486e1-82e3-48d5-99fe-c89b300a2136 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 968.914930] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance e4c1c94b-744f-4bed-8e68-3b3f9de7db44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 968.915071] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 31ee1061-6199-4341-86ab-9ae606b269fe is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 968.915181] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 968.915296] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 909b3535-9410-4820-a34d-6c0e9627f506 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 968.915454] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 2a1587cd-8b47-439f-948c-d58a5dc8220e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 968.915536] env[65503]: WARNING nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 708ed8ab-0ec9-457c-966d-b11c55895981 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 968.915735] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 3b31611f-84f9-4bf6-8e26-f64db06d15ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 968.915812] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance ff256d3f-af88-4f01-bdfd-cf89e06ab364 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 968.915854] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 81e27e9b-7d7e-4b04-8257-268660fd9ec3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 968.915945] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 8962b1b8-4875-4a1a-b231-36385755a976 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 968.916063] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 968.916178] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 82415443-1884-4898-996e-828d23f67f23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 968.959240] env[65503]: DEBUG nova.network.neutron [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Successfully created port: d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 968.973154] env[65503]: INFO nova.compute.manager [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Took 31.12 seconds to build instance. [ 969.037959] env[65503]: DEBUG nova.compute.manager [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 969.147924] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquiring lock "refresh_cache-b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.148136] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquired lock "refresh_cache-b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.148401] env[65503]: DEBUG nova.network.neutron [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 969.288390] env[65503]: DEBUG nova.compute.manager [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 969.289665] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec34a992-2f9b-46ee-8ab4-caca1f2abb5f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.330290] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450373, 'name': CreateVM_Task, 'duration_secs': 1.113664} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.330290] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 969.330465] env[65503]: WARNING neutronclient.v2_0.client [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 969.331166] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.331166] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.331334] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 969.332309] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15e22f66-c2d7-4595-9c12-820ccc49cfad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.339117] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 969.339117] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e302a0-12a0-fcdb-e953-68ed4b1ba37e" [ 969.339117] env[65503]: _type = "Task" [ 969.339117] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.348977] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e302a0-12a0-fcdb-e953-68ed4b1ba37e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.420271] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance a043a8e2-8661-4d80-939d-8e7b02b0459f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 969.420572] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 969.421090] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '10', 'num_vm_shelved_offloaded': '1', 'num_task_spawning': '4', 'num_os_type_None': '10', 'num_proj_f5a588e741704449878e7a03d7892d11': '1', 'io_workload': '4', 'num_vm_active': '4', 'num_task_None': '5', 'num_proj_34e8cd66745a40d2acebbce98050ee5d': '1', 'num_proj_19e0e62fe31a46fc802dbfc625ac7645': '1', 'num_proj_3658921b747e4d78a2046b838cb36d26': '1', 'num_proj_be67f50c5bc447309d4c04f3f2805455': '1', 'num_vm_stopped': '1', 'num_task_image_uploading': '1', 'num_proj_8c5b0c3771b5434992cd58e1af539bde': '1', 'num_vm_building': '4', 'num_proj_463e93d05e1e4b27a3dc866a5b1991d0': '1', 'num_proj_fd6e7f469b7d408fae0621171c096f0a': '1', 'num_proj_2add6a04793446c980e8310b24790764': '1', 'num_proj_bf54348a3d0948cfa816cc3746e86806': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 969.477122] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c4c3437-6e27-4b58-bf8e-a4955fdcd000 tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "8962b1b8-4875-4a1a-b231-36385755a976" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.629s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.543694] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "e37758cc-7287-4271-ad47-d711201d0add" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.544061] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "e37758cc-7287-4271-ad47-d711201d0add" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.571212] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.640183] env[65503]: DEBUG oslo_concurrency.lockutils [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 
tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.640439] env[65503]: DEBUG oslo_concurrency.lockutils [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.640748] env[65503]: DEBUG nova.objects.instance [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'flavor' on Instance uuid 2a1587cd-8b47-439f-948c-d58a5dc8220e {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.651426] env[65503]: WARNING openstack [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 969.654047] env[65503]: WARNING openstack [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 969.675101] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c940638-7a25-4a31-b369-849b1495b0a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.684030] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6038a1ce-7822-44e0-8716-c75684b80fd1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.689085] env[65503]: DEBUG nova.network.neutron [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 969.717853] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01fb4ba-e150-4c12-b4f4-654e049b2573 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.721820] env[65503]: WARNING openstack [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 969.722239] env[65503]: WARNING openstack [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 969.734639] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cbcad7-f4df-4e1a-a5c7-f497b76e65f9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.749870] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.801417] env[65503]: INFO nova.compute.manager [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] instance snapshotting [ 969.802166] env[65503]: DEBUG nova.objects.instance [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'flavor' on Instance uuid 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.804734] env[65503]: WARNING neutronclient.v2_0.client [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 969.805495] env[65503]: WARNING openstack [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 969.805938] env[65503]: WARNING openstack [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 969.851066] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.851609] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Processing image b21b686c-8154-4b2b-9ac4-03e9bcfc50c2 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 969.851609] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.851805] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.851999] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 969.852285] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-702b1590-7bdb-48de-90a6-0484de1becc5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.865258] env[65503]: DEBUG nova.compute.manager [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 969.868680] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 969.868853] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 969.869614] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-148ca1c1-ecfd-46de-b65f-9d0bba9abd8b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.877184] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 969.877184] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52484a8e-2493-39fa-98ba-cd78caea0fcd" [ 969.877184] env[65503]: _type = "Task" [ 969.877184] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.892437] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52484a8e-2493-39fa-98ba-cd78caea0fcd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.899216] env[65503]: DEBUG nova.virt.hardware [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 969.899480] env[65503]: DEBUG nova.virt.hardware [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 969.899636] env[65503]: DEBUG nova.virt.hardware [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 969.899819] env[65503]: DEBUG nova.virt.hardware [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 969.899959] env[65503]: DEBUG nova.virt.hardware [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 969.900112] env[65503]: DEBUG nova.virt.hardware [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 969.900318] env[65503]: DEBUG nova.virt.hardware [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 969.900471] env[65503]: DEBUG nova.virt.hardware [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 969.900632] env[65503]: 
DEBUG nova.virt.hardware [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 969.900788] env[65503]: DEBUG nova.virt.hardware [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 969.900956] env[65503]: DEBUG nova.virt.hardware [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 969.902396] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e14f5f8-179a-4ed6-bb28-2a7fdfc0dcad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.907668] env[65503]: DEBUG nova.network.neutron [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Updating instance_info_cache with network_info: [{"id": "6b06aa59-3060-4bf3-8f9f-e8942a00fe50", "address": "fa:16:3e:d9:bd:5a", "network": {"id": "5eb631b4-f015-489c-9e66-301fd4cbbe6f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1557128008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2add6a04793446c980e8310b24790764", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b06aa59-30", "ovs_interfaceid": "6b06aa59-3060-4bf3-8f9f-e8942a00fe50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 969.915881] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2df587c-236b-4208-a928-fa8e296bad61 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.052726] env[65503]: DEBUG nova.compute.manager [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 970.144571] env[65503]: WARNING neutronclient.v2_0.client [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 970.145402] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 970.145817] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 970.259144] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 970.304126] env[65503]: DEBUG nova.objects.instance [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'pci_requests' on Instance uuid 2a1587cd-8b47-439f-948c-d58a5dc8220e {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.318776] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4802bd-9645-4f1d-9cc8-3c2dcae58dd3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.343692] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b866bf22-cdd1-4899-96ea-b27911b23569 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.391412] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Preparing fetch location {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 970.391712] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: 
ff256d3f-af88-4f01-bdfd-cf89e06ab364] Fetch image to [datastore2] OSTACK_IMG_0648663f-48d0-412d-a71a-3c99f9dff3e5/OSTACK_IMG_0648663f-48d0-412d-a71a-3c99f9dff3e5.vmdk {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 970.392087] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Downloading stream optimized image b21b686c-8154-4b2b-9ac4-03e9bcfc50c2 to [datastore2] OSTACK_IMG_0648663f-48d0-412d-a71a-3c99f9dff3e5/OSTACK_IMG_0648663f-48d0-412d-a71a-3c99f9dff3e5.vmdk on the data store datastore2 as vApp {{(pid=65503) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 970.392087] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Downloading image file data b21b686c-8154-4b2b-9ac4-03e9bcfc50c2 to the ESX as VM named 'OSTACK_IMG_0648663f-48d0-412d-a71a-3c99f9dff3e5' {{(pid=65503) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 970.394853] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.395120] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.395384] env[65503]: DEBUG nova.compute.manager [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 970.397012] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfa99d2-494e-4bdb-a36f-b6f4beb7adc8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.406584] env[65503]: DEBUG nova.compute.manager [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 970.407382] env[65503]: DEBUG nova.objects.instance [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lazy-loading 'flavor' on Instance uuid 81e27e9b-7d7e-4b04-8257-268660fd9ec3 {{(pid=65503) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.409893] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Releasing lock "refresh_cache-b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.413039] env[65503]: DEBUG nova.compute.manager [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Instance network_info: |[{"id": "6b06aa59-3060-4bf3-8f9f-e8942a00fe50", "address": "fa:16:3e:d9:bd:5a", "network": {"id": "5eb631b4-f015-489c-9e66-301fd4cbbe6f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1557128008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2add6a04793446c980e8310b24790764", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b06aa59-30", "ovs_interfaceid": "6b06aa59-3060-4bf3-8f9f-e8942a00fe50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 970.413039] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:bd:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04ebd8af-aaf6-4d04-b869-3882e2571ed7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b06aa59-3060-4bf3-8f9f-e8942a00fe50', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 970.420153] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Creating folder: Project (2add6a04793446c980e8310b24790764). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 970.423241] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b15a9d8-16cc-40fc-9339-4360f3f9fe46 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.449802] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Created folder: Project (2add6a04793446c980e8310b24790764) in parent group-v870190. [ 970.449975] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Creating folder: Instances. Parent ref: group-v870422. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 970.450237] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32ec46f0-9718-4bfe-b8a7-cd73c35f94a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.466115] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Created folder: Instances in parent group-v870422. [ 970.466417] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 970.466634] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 970.466858] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7843c634-d1bd-47de-bd82-5ef300f7df05 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.488687] env[65503]: DEBUG oslo_vmware.rw_handles [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 970.488687] env[65503]: value = "resgroup-9" [ 970.488687] env[65503]: _type = "ResourcePool" [ 970.488687] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 970.489055] env[65503]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f4a3e4ef-d688-4f59-8548-962f252a9e42 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.505716] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 970.505716] env[65503]: value = "task-4450376" [ 970.505716] env[65503]: _type = "Task" [ 970.505716] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.513149] env[65503]: DEBUG oslo_vmware.rw_handles [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lease: (returnval){ [ 970.513149] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5260b83b-b348-241f-4755-6f165a271641" [ 970.513149] env[65503]: _type = "HttpNfcLease" [ 970.513149] env[65503]: } obtained for vApp import into resource pool (val){ [ 970.513149] env[65503]: value = "resgroup-9" [ 970.513149] env[65503]: _type = "ResourcePool" [ 970.513149] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 970.513627] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the lease: (returnval){ [ 970.513627] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5260b83b-b348-241f-4755-6f165a271641" [ 970.513627] env[65503]: _type = "HttpNfcLease" [ 970.513627] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 970.517530] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450376, 'name': CreateVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.525360] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 970.525360] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5260b83b-b348-241f-4755-6f165a271641" [ 970.525360] env[65503]: _type = "HttpNfcLease" [ 970.525360] env[65503]: } is initializing. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 970.574488] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.612596] env[65503]: DEBUG nova.network.neutron [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Successfully updated port: d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 970.711767] env[65503]: DEBUG nova.compute.manager [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Received event network-vif-plugged-6b06aa59-3060-4bf3-8f9f-e8942a00fe50 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 970.711977] env[65503]: DEBUG oslo_concurrency.lockutils [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Acquiring lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.712710] env[65503]: DEBUG oslo_concurrency.lockutils [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.712710] env[65503]: DEBUG oslo_concurrency.lockutils [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.712710] env[65503]: DEBUG nova.compute.manager [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] No waiting events found dispatching network-vif-plugged-6b06aa59-3060-4bf3-8f9f-e8942a00fe50 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 970.712993] env[65503]: WARNING nova.compute.manager [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Received unexpected event network-vif-plugged-6b06aa59-3060-4bf3-8f9f-e8942a00fe50 for instance with vm_state building and task_state spawning. 
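The "Acquiring lock …", "acquired … waited N.NNNs", and "released … held N.NNNs" triplets above (the `inner` wrapper at lockutils.py:405/410/424 and the `lock` context manager at :313/316) are produced by oslo.concurrency named locks. A minimal sketch of the two forms that emit them, reusing lock names from the entries above; the function bodies are placeholders, not Nova's implementation:

```python
# Minimal sketch, not Nova's code: decorator and context-manager forms of
# oslo.concurrency named locks, which emit the "Acquiring lock ..." /
# "acquired ... waited" / "released ... held" DEBUG lines seen above.
from oslo_concurrency import lockutils

@lockutils.synchronized('b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14-events')
def _pop_event():
    # Critical section: only one green thread at a time may touch the
    # per-instance event table (placeholder body).
    return None

def refresh_network_cache():
    # Context-manager form, e.g. the "refresh_cache-<uuid>" lock above.
    with lockutils.lock('refresh_cache-b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14'):
        pass  # rebuild the instance network info cache here

_pop_event()
refresh_network_cache()
```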
[ 970.713064] env[65503]: DEBUG nova.compute.manager [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Received event network-changed-6b06aa59-3060-4bf3-8f9f-e8942a00fe50 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 970.713282] env[65503]: DEBUG nova.compute.manager [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Refreshing instance network info cache due to event network-changed-6b06aa59-3060-4bf3-8f9f-e8942a00fe50. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 970.713392] env[65503]: DEBUG oslo_concurrency.lockutils [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Acquiring lock "refresh_cache-b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.713561] env[65503]: DEBUG oslo_concurrency.lockutils [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Acquired lock "refresh_cache-b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.713685] env[65503]: DEBUG nova.network.neutron [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Refreshing network info cache for port 6b06aa59-3060-4bf3-8f9f-e8942a00fe50 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 970.740900] env[65503]: DEBUG nova.compute.manager [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Received event network-changed-73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 970.741053] env[65503]: DEBUG nova.compute.manager [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Refreshing instance network info cache due to event network-changed-73e98445-c951-4dc2-82e3-537e2196f82a. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 970.741327] env[65503]: DEBUG oslo_concurrency.lockutils [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] Acquiring lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.741514] env[65503]: DEBUG oslo_concurrency.lockutils [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] Acquired lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.741805] env[65503]: DEBUG nova.network.neutron [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Refreshing network info cache for port 73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 970.766452] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 970.766592] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.923s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.766828] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.536s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.767042] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.769881] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.158s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.769881] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.771858] env[65503]: DEBUG oslo_concurrency.lockutils [None 
req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.174s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.772179] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.774111] env[65503]: DEBUG oslo_concurrency.lockutils [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.610s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.774296] env[65503]: DEBUG oslo_concurrency.lockutils [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.776060] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.205s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.777695] env[65503]: INFO nova.compute.claims [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 970.808303] env[65503]: DEBUG nova.objects.base [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Object Instance<2a1587cd-8b47-439f-948c-d58a5dc8220e> lazy-loaded attributes: flavor,pci_requests {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 970.808303] env[65503]: DEBUG nova.network.neutron [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 970.809204] env[65503]: WARNING neutronclient.v2_0.client [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
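The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entry (logged from oslo_service/backend/_eventlet/loopingcall.py) and the repeated "progress is 0%" / lease "is initializing" entries reflect fixed-interval polling driven by oslo.service looping calls. A generic sketch of that pattern follows; the polled condition and interval are illustrative stand-ins, not values from Nova:

```python
# Generic fixed-interval polling sketch with oslo.service; the condition
# below is a stand-in for "vCenter task finished" / "lease ready".
import time

from oslo_service import loopingcall

deadline = time.time() + 5.0

def _poll():
    # In the driver this would check the task/lease state; raising
    # LoopingCallDone stops the loop and its retvalue becomes .wait()'s result.
    if time.time() >= deadline:
        raise loopingcall.LoopingCallDone(retvalue='ready')

timer = loopingcall.FixedIntervalLoopingCall(_poll)
result = timer.start(interval=0.5).wait()   # blocks until LoopingCallDone
print(result)                               # -> 'ready'
```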
[ 970.809204] env[65503]: WARNING neutronclient.v2_0.client [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 970.811036] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 970.811036] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 970.826871] env[65503]: INFO nova.scheduler.client.report [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Deleted allocations for instance 31ee1061-6199-4341-86ab-9ae606b269fe [ 970.832443] env[65503]: INFO nova.scheduler.client.report [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Deleted allocations for instance b6cda94b-2894-4cf0-8522-6593df9723bd [ 970.853017] env[65503]: INFO nova.scheduler.client.report [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted allocations for instance 708ed8ab-0ec9-457c-966d-b11c55895981 [ 970.861757] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 970.865200] env[65503]: INFO nova.scheduler.client.report [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleted allocations for instance 909b3535-9410-4820-a34d-6c0e9627f506 [ 970.867761] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3d0c5885-df47-4e99-ae3b-05d4239c66d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.872390] env[65503]: DEBUG oslo_concurrency.lockutils [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "8962b1b8-4875-4a1a-b231-36385755a976" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.872874] 
env[65503]: DEBUG oslo_concurrency.lockutils [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "8962b1b8-4875-4a1a-b231-36385755a976" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.873142] env[65503]: DEBUG oslo_concurrency.lockutils [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "8962b1b8-4875-4a1a-b231-36385755a976-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.873347] env[65503]: DEBUG oslo_concurrency.lockutils [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "8962b1b8-4875-4a1a-b231-36385755a976-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.873541] env[65503]: DEBUG oslo_concurrency.lockutils [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "8962b1b8-4875-4a1a-b231-36385755a976-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.881070] env[65503]: INFO nova.compute.manager [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Terminating instance [ 970.892233] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 970.892233] env[65503]: value = "task-4450378" [ 970.892233] env[65503]: _type = "Task" [ 970.892233] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.902982] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450378, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.907612] env[65503]: DEBUG nova.policy [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b617c5c7508403f9bef0d6b436f541d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be67f50c5bc447309d4c04f3f2805455', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 970.922235] env[65503]: DEBUG oslo_vmware.rw_handles [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b1c36a-635c-0458-58d2-31529f050ec1/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 970.923532] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80afae9-758c-42b3-8163-a9c1d77cd15f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.932408] env[65503]: DEBUG oslo_vmware.rw_handles [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b1c36a-635c-0458-58d2-31529f050ec1/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 970.932616] env[65503]: ERROR oslo_vmware.rw_handles [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b1c36a-635c-0458-58d2-31529f050ec1/disk-0.vmdk due to incomplete transfer. [ 970.932888] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4afe759d-0115-413d-b047-684e02877817 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.944591] env[65503]: DEBUG oslo_vmware.rw_handles [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b1c36a-635c-0458-58d2-31529f050ec1/disk-0.vmdk. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 970.944816] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Uploaded image f47b707b-e103-4bed-9885-087a7eda64cd to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 970.949110] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 970.949683] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-26cd6eb0-a408-48e1-af5f-cac8618b3986 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.960158] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 970.960158] env[65503]: value = "task-4450379" [ 970.960158] env[65503]: _type = "Task" [ 970.960158] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.972840] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450379, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.991146] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.991503] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.031753] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450376, 'name': CreateVM_Task, 'duration_secs': 0.461987} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.032623] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 971.033425] env[65503]: WARNING neutronclient.v2_0.client [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 971.033965] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.034262] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.034726] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 971.037429] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a019ceb-fcb6-4edc-8c74-13fd96a2f6dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.039326] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 971.039326] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5260b83b-b348-241f-4755-6f165a271641" [ 971.039326] env[65503]: _type = "HttpNfcLease" [ 971.039326] env[65503]: } is initializing. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 971.044312] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for the task: (returnval){ [ 971.044312] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52718c1a-ddff-53e5-1ac0-486c4672c48a" [ 971.044312] env[65503]: _type = "Task" [ 971.044312] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.056511] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52718c1a-ddff-53e5-1ac0-486c4672c48a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.117353] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.117624] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.117840] env[65503]: DEBUG nova.network.neutron [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 971.216093] env[65503]: WARNING neutronclient.v2_0.client [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 971.216836] env[65503]: WARNING openstack [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 971.217763] env[65503]: WARNING openstack [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 971.244971] env[65503]: WARNING neutronclient.v2_0.client [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
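The recurring "Disabling service 'block-storage' / 'key-manager' … no such option valid_interfaces in group [cinder]/[barbican]" warnings indicate that openstacksdk is reading config groups on which keystoneauth's adapter options were never registered. As a hedged illustration only (not how Nova or the SDK wire this up), registering those adapter options on a group makes `valid_interfaces` resolvable instead of raising NoSuchOptError:

```python
# Hedged illustration: register keystoneauth adapter options (which include
# valid_interfaces) on a config group so CONF.cinder.valid_interfaces resolves.
# The group and project names here are placeholders.
from keystoneauth1 import loading as ks_loading
from oslo_config import cfg

CONF = cfg.ConfigOpts()
ks_loading.register_adapter_conf_options(CONF, 'cinder')
CONF(args=[], project='example')

# An unset option now returns its default instead of raising NoSuchOptError.
print(CONF.cinder.valid_interfaces)
```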
[ 971.247029] env[65503]: WARNING openstack [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 971.247029] env[65503]: WARNING openstack [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 971.348188] env[65503]: DEBUG oslo_concurrency.lockutils [None req-60f5a812-88bf-4737-b87b-c219d60dd4d8 tempest-ServersTestManualDisk-281688340 tempest-ServersTestManualDisk-281688340-project-member] Lock "31ee1061-6199-4341-86ab-9ae606b269fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.015s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.350408] env[65503]: DEBUG nova.network.neutron [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Successfully created port: 995d778e-470e-4cfa-bb9e-b83d9780f51a {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 971.352753] env[65503]: DEBUG oslo_concurrency.lockutils [None req-71817f6c-f54d-4314-8908-337630d8fe7a tempest-ServersWithSpecificFlavorTestJSON-1066645637 tempest-ServersWithSpecificFlavorTestJSON-1066645637-project-member] Lock "b6cda94b-2894-4cf0-8522-6593df9723bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.642s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.370094] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5c31cded-f27a-4df7-af12-27475a204ca2 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "708ed8ab-0ec9-457c-966d-b11c55895981" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.395s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.384257] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfd65b90-7043-4ae3-b5f3-e3dae5cd99fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "909b3535-9410-4820-a34d-6c0e9627f506" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.631s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.395160] env[65503]: DEBUG nova.compute.manager [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 971.395375] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 971.397380] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58039e6-2409-43cb-8676-090bb57880d2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.410842] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450378, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.414235] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 971.414574] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59251975-d494-41de-86c4-54775cdadb20 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.418324] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 971.418630] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7278dd8b-78bb-46f3-ae66-d45a9a339f74 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.428065] env[65503]: DEBUG oslo_vmware.api [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 971.428065] env[65503]: value = "task-4450380" [ 971.428065] env[65503]: _type = "Task" [ 971.428065] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.434516] env[65503]: DEBUG oslo_vmware.api [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 971.434516] env[65503]: value = "task-4450381" [ 971.434516] env[65503]: _type = "Task" [ 971.434516] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.443795] env[65503]: DEBUG oslo_vmware.api [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450380, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.450410] env[65503]: DEBUG oslo_vmware.api [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450381, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.472296] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450379, 'name': Destroy_Task, 'duration_secs': 0.391573} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.472862] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Destroyed the VM [ 971.473307] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 971.473812] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7eb9e167-83a0-4faa-964f-fa905d8732b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.477700] env[65503]: WARNING openstack [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 971.478229] env[65503]: WARNING openstack [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 971.495729] env[65503]: DEBUG nova.compute.manager [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 971.499298] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 971.499298] env[65503]: value = "task-4450382" [ 971.499298] env[65503]: _type = "Task" [ 971.499298] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.500644] env[65503]: WARNING openstack [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 971.501193] env[65503]: WARNING openstack [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 971.523030] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450382, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.538728] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 971.538728] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5260b83b-b348-241f-4755-6f165a271641" [ 971.538728] env[65503]: _type = "HttpNfcLease" [ 971.538728] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 971.539130] env[65503]: DEBUG oslo_vmware.rw_handles [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 971.539130] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5260b83b-b348-241f-4755-6f165a271641" [ 971.539130] env[65503]: _type = "HttpNfcLease" [ 971.539130] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 971.540205] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc7d679-eff8-4b6d-b119-50d83ed92818 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.556062] env[65503]: DEBUG oslo_vmware.rw_handles [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274f41e-e49a-c848-edec-17c8ee5ac02b/disk-0.vmdk from lease info. 
{{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 971.556231] env[65503]: DEBUG oslo_vmware.rw_handles [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274f41e-e49a-c848-edec-17c8ee5ac02b/disk-0.vmdk. {{(pid=65503) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 971.561887] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52718c1a-ddff-53e5-1ac0-486c4672c48a, 'name': SearchDatastore_Task, 'duration_secs': 0.030995} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.563313] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.563685] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 971.563872] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.564032] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.564213] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 971.628276] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42b1cc92-7546-48b5-aeff-8651595f9921 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.631194] env[65503]: WARNING openstack [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 
tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 971.631332] env[65503]: WARNING openstack [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 971.639775] env[65503]: WARNING neutronclient.v2_0.client [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 971.640423] env[65503]: WARNING openstack [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 971.640765] env[65503]: WARNING openstack [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 971.655723] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3764a692-7c52-47a1-9eda-4559fd3de466 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.665206] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 971.665417] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 971.666341] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3acea90-fd95-4a1d-af16-69741e866b4f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.674225] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for the task: (returnval){ [ 971.674225] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52df923b-485b-b57e-adb4-24cac6369190" [ 971.674225] env[65503]: _type = "Task" [ 971.674225] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.685622] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52df923b-485b-b57e-adb4-24cac6369190, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.816717] env[65503]: DEBUG nova.network.neutron [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 971.842784] env[65503]: WARNING neutronclient.v2_0.client [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 971.843596] env[65503]: WARNING openstack [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 971.844562] env[65503]: WARNING openstack [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 971.912168] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450378, 'name': CreateSnapshot_Task, 'duration_secs': 0.914161} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.915546] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 971.922122] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d7f267-31be-4624-be3c-71352a5c6554 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.928967] env[65503]: WARNING openstack [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 971.929400] env[65503]: WARNING openstack [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 971.985895] env[65503]: DEBUG oslo_vmware.api [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450380, 'name': PowerOffVM_Task, 'duration_secs': 0.2328} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.985895] env[65503]: DEBUG oslo_vmware.api [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450381, 'name': PowerOffVM_Task, 'duration_secs': 0.196788} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.991542] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 971.991930] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 971.992379] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 971.992379] env[65503]: DEBUG nova.compute.manager [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 971.992745] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4ca8901-1e5c-4d4d-8a60-b97a75b2c48d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.996148] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18779eff-ee7e-49fe-965f-7388b463d9d0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.012492] env[65503]: DEBUG nova.network.neutron [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Updated VIF entry in instance network info cache for port 6b06aa59-3060-4bf3-8f9f-e8942a00fe50. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 972.012940] env[65503]: DEBUG nova.network.neutron [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Updating instance_info_cache with network_info: [{"id": "6b06aa59-3060-4bf3-8f9f-e8942a00fe50", "address": "fa:16:3e:d9:bd:5a", "network": {"id": "5eb631b4-f015-489c-9e66-301fd4cbbe6f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1557128008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2add6a04793446c980e8310b24790764", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b06aa59-30", "ovs_interfaceid": "6b06aa59-3060-4bf3-8f9f-e8942a00fe50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 972.043645] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450382, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.051443] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.064679] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d658f42-edd9-4c79-a2a6-04472c7ec9aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.076446] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40511780-61a7-4de0-a9a4-fb1d84aefe6d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.080861] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 972.081171] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 972.081360] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Deleting the datastore file [datastore1] 8962b1b8-4875-4a1a-b231-36385755a976 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 972.084826] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-701f0166-7d55-4a0b-a08e-bcc183b51e01 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.125708] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c835a4-dbb0-4f9b-8fb9-0bf1ac247c48 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.128946] env[65503]: DEBUG oslo_vmware.api [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for the task: (returnval){ [ 972.128946] env[65503]: value = "task-4450384" [ 972.128946] env[65503]: _type = "Task" [ 972.128946] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.139297] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed1e040-94cb-41ac-bbc5-679837d2ebbd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.149810] env[65503]: DEBUG oslo_vmware.api [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450384, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.162194] env[65503]: DEBUG nova.compute.provider_tree [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.185658] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52df923b-485b-b57e-adb4-24cac6369190, 'name': SearchDatastore_Task, 'duration_secs': 0.013489} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.186742] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f7f08a3-e63a-4072-b1ed-bf9913418822 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.194105] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for the task: (returnval){ [ 972.194105] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5280aa32-f3b1-10d1-30a8-b5af70e246f1" [ 972.194105] env[65503]: _type = "Task" [ 972.194105] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.204018] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5280aa32-f3b1-10d1-30a8-b5af70e246f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.349786] env[65503]: DEBUG nova.network.neutron [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updated VIF entry in instance network info cache for port 73e98445-c951-4dc2-82e3-537e2196f82a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 972.350215] env[65503]: DEBUG nova.network.neutron [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updating instance_info_cache with network_info: [{"id": "73e98445-c951-4dc2-82e3-537e2196f82a", "address": "fa:16:3e:d2:11:71", "network": {"id": "1ed26744-3e1a-4978-ae1a-76469b646e4a", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472231669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5a588e741704449878e7a03d7892d11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73e98445-c9", "ovs_interfaceid": "73e98445-c951-4dc2-82e3-537e2196f82a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 972.477703] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 972.480631] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4b05d3ac-9053-4390-a67e-8eb5e5833f84 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.490932] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 972.490932] env[65503]: value = "task-4450385" [ 972.490932] env[65503]: _type = "Task" [ 972.490932] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.502206] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450385, 'name': CloneVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.530337] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8991b96b-e3e9-4555-b8b3-e7846cabe5f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.135s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.535025] env[65503]: DEBUG oslo_concurrency.lockutils [req-fdbfd82d-947a-498e-bda8-00117ac06614 req-0bc5eaef-293c-44c1-b3eb-157551961b46 service nova] Releasing lock "refresh_cache-b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.539522] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450382, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.640597] env[65503]: DEBUG oslo_vmware.api [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Task: {'id': task-4450384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143916} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.640886] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 972.641088] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 972.641271] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 972.641465] env[65503]: INFO nova.compute.manager [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Took 1.25 seconds to destroy the instance on the hypervisor. [ 972.641706] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 972.641915] env[65503]: DEBUG nova.compute.manager [-] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 972.642025] env[65503]: DEBUG nova.network.neutron [-] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 972.642291] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 972.642874] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 972.643163] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 972.667979] env[65503]: DEBUG nova.scheduler.client.report [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.708377] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5280aa32-f3b1-10d1-30a8-b5af70e246f1, 'name': SearchDatastore_Task, 'duration_secs': 0.011215} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.708818] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.709162] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14/b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 972.711111] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2cdf09d0-7560-4871-822b-e06e695e1f2b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.720877] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for the task: (returnval){ [ 972.720877] env[65503]: value = "task-4450386" [ 972.720877] env[65503]: _type = "Task" [ 972.720877] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.733115] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450386, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.854234] env[65503]: DEBUG oslo_concurrency.lockutils [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] Releasing lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.854646] env[65503]: DEBUG nova.compute.manager [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Received event network-vif-deleted-db676209-2c15-4a3f-9346-21566b03a82d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 972.854790] env[65503]: DEBUG nova.compute.manager [req-b6b661d0-00ee-4bff-9e8a-92c39ecbc29a req-193c23a9-7c4a-4c84-83c5-cb1ce841b2d9 service nova] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Received event network-vif-deleted-f5e5c61e-0df6-47d9-ab15-21c80b68c833 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 973.010556] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450385, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.036933] env[65503]: DEBUG oslo_vmware.api [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450382, 'name': RemoveSnapshot_Task, 'duration_secs': 1.430852} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.039507] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 973.039837] env[65503]: INFO nova.compute.manager [None req-3aea8e88-7b50-476f-bd63-326ecbab2287 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Took 16.30 seconds to snapshot the instance on the hypervisor. [ 973.056727] env[65503]: DEBUG oslo_vmware.rw_handles [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Completed reading data from the image iterator. {{(pid=65503) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 973.057066] env[65503]: DEBUG oslo_vmware.rw_handles [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274f41e-e49a-c848-edec-17c8ee5ac02b/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 973.058307] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0e4e42-a902-47bc-8524-be04c44599bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.068671] env[65503]: DEBUG oslo_vmware.rw_handles [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274f41e-e49a-c848-edec-17c8ee5ac02b/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 973.068941] env[65503]: DEBUG oslo_vmware.rw_handles [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274f41e-e49a-c848-edec-17c8ee5ac02b/disk-0.vmdk. 
{{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 973.069545] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-856e9e7d-b5f9-4c5c-97fc-01eccd410c98 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.084631] env[65503]: DEBUG nova.network.neutron [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Successfully updated port: 995d778e-470e-4cfa-bb9e-b83d9780f51a {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 973.117894] env[65503]: WARNING neutronclient.v2_0.client [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 973.118786] env[65503]: WARNING openstack [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 973.119360] env[65503]: WARNING openstack [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 973.174426] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.175173] env[65503]: DEBUG nova.compute.manager [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 973.182502] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.608s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.184551] env[65503]: INFO nova.compute.claims [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 973.227737] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 973.238228] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450386, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.332777] env[65503]: DEBUG oslo_vmware.rw_handles [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274f41e-e49a-c848-edec-17c8ee5ac02b/disk-0.vmdk. {{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 973.333208] env[65503]: INFO nova.virt.vmwareapi.images [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Downloaded image file data b21b686c-8154-4b2b-9ac4-03e9bcfc50c2 [ 973.334638] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4754e3b7-3768-464d-8b03-3a4a0064000c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.356886] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af9b6fb7-8f84-4c12-a0e4-97acd9d3e7ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.402024] env[65503]: INFO nova.virt.vmwareapi.images [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] The imported VM was unregistered [ 973.403874] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Caching image {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 973.404376] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 
tempest-ServersNegativeTestJSON-811523686-project-member] Creating directory with path [datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2 {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.405224] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-472ee82c-1466-4563-9396-cf986f2a013b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.421235] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Created directory with path [datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2 {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.421235] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_0648663f-48d0-412d-a71a-3c99f9dff3e5/OSTACK_IMG_0648663f-48d0-412d-a71a-3c99f9dff3e5.vmdk to [datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2.vmdk. {{(pid=65503) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 973.421235] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-049c4f16-2f26-4367-a338-8930ce46e89c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.430755] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 973.430755] env[65503]: value = "task-4450388" [ 973.430755] env[65503]: _type = "Task" [ 973.430755] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.440213] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450388, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.506813] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450385, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.589880] env[65503]: DEBUG oslo_concurrency.lockutils [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.589880] env[65503]: DEBUG oslo_concurrency.lockutils [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.589880] env[65503]: DEBUG nova.network.neutron [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 973.631062] env[65503]: DEBUG nova.network.neutron [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance_info_cache with network_info: [{"id": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "address": "fa:16:3e:6c:9a:41", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e6ffe6-e1", "ovs_interfaceid": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 973.691581] env[65503]: DEBUG nova.compute.utils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 973.696315] env[65503]: DEBUG nova.compute.manager [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 973.696315] env[65503]: DEBUG nova.network.neutron [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 973.696315] env[65503]: WARNING neutronclient.v2_0.client [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 973.696628] env[65503]: WARNING neutronclient.v2_0.client [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 973.697122] env[65503]: WARNING openstack [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 973.697460] env[65503]: WARNING openstack [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 973.739905] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450386, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.605935} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.740262] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14/b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 973.740494] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 973.740809] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce0c31e0-68d6-4e68-8694-b06a395a699d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.751499] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for the task: (returnval){ [ 973.751499] env[65503]: value = "task-4450389" [ 973.751499] env[65503]: _type = "Task" [ 973.751499] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.766553] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450389, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.842383] env[65503]: DEBUG nova.policy [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39128a640d8e47f1bd527d875b79bc6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34a784de8c744d988015e75cf157103e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 973.949119] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450388, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.011323] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450385, 'name': CloneVM_Task, 'duration_secs': 1.489374} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.011657] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Created linked-clone VM from snapshot [ 974.012587] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097006a2-0bbf-473f-9a1f-64bf8bb0e657 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.025464] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Uploading image 0f5177f1-4307-452a-b134-570a3098c1b9 {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 974.056111] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 974.056111] env[65503]: value = "vm-870427" [ 974.056111] env[65503]: _type = "VirtualMachine" [ 974.056111] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 974.056477] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5bfe51a1-4329-4fe2-b30d-fbaec79faacb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.068677] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lease: (returnval){ [ 974.068677] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d89249-d9aa-d24e-5c57-1fd2264b0dbc" [ 974.068677] env[65503]: _type = "HttpNfcLease" [ 974.068677] env[65503]: } obtained for exporting VM: (result){ [ 974.068677] env[65503]: value = "vm-870427" [ 974.068677] env[65503]: _type = "VirtualMachine" [ 974.068677] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 974.071137] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the lease: (returnval){ [ 974.071137] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d89249-d9aa-d24e-5c57-1fd2264b0dbc" [ 974.071137] env[65503]: _type = "HttpNfcLease" [ 974.071137] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 974.085015] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 974.085015] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d89249-d9aa-d24e-5c57-1fd2264b0dbc" [ 974.085015] env[65503]: _type = "HttpNfcLease" [ 974.085015] env[65503]: } is ready. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 974.085540] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 974.085540] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d89249-d9aa-d24e-5c57-1fd2264b0dbc" [ 974.085540] env[65503]: _type = "HttpNfcLease" [ 974.085540] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 974.086500] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878cf616-5b63-49b8-a9f3-11d51431d61d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.098615] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 974.099077] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 974.109319] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a2263-d7fa-63f2-76d6-6282b0435403/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 974.109807] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a2263-d7fa-63f2-76d6-6282b0435403/disk-0.vmdk for reading. 
{{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 974.170018] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.170508] env[65503]: DEBUG nova.compute.manager [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Instance network_info: |[{"id": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "address": "fa:16:3e:6c:9a:41", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e6ffe6-e1", "ovs_interfaceid": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 974.171047] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:9a:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 974.179296] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 974.180951] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82415443-1884-4898-996e-828d23f67f23] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 974.181592] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65cf464b-f3d8-476c-a76c-79d055c47a82 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.199845] env[65503]: DEBUG nova.compute.manager [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 974.212959] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 974.212959] env[65503]: value = "task-4450391" [ 974.212959] env[65503]: _type = "Task" [ 974.212959] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.226811] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450391, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.232791] env[65503]: DEBUG nova.network.neutron [-] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 974.241921] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-62ee050f-a528-4321-b816-6c59749fd953 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.270605] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450389, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093908} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.274357] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 974.278514] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb80a8a-7f52-43cd-b65d-12f4d1dea6bd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.309197] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14/b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 974.313788] env[65503]: DEBUG nova.network.neutron [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Successfully created port: c5073b44-93ec-4c66-9634-09aeaf801426 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 974.318895] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e9992f9-ff43-419c-9dac-4ff58357c2cc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.351070] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for the task: (returnval){ [ 974.351070] env[65503]: value = "task-4450392" [ 974.351070] env[65503]: _type = "Task" [ 974.351070] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.364535] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450392, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.436597] env[65503]: WARNING nova.network.neutron [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] d2c28c8e-55de-416b-97e1-c5ea06e7f107 already exists in list: networks containing: ['d2c28c8e-55de-416b-97e1-c5ea06e7f107']. ignoring it [ 974.451780] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450388, 'name': MoveVirtualDisk_Task} progress is 38%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.523329] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a9b98e-3a65-44bb-8a56-015df46fe953 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.537814] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a29f15-197b-46d0-b802-b519aa8a8f11 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.588682] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8aee5e0-8ee5-4aca-b896-fa017dbb6d3e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.599244] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b05f60f-3254-41b6-bde6-b7b4627941da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.620503] env[65503]: DEBUG nova.compute.provider_tree [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.658640] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 974.658932] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 974.731844] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450391, 'name': CreateVM_Task, 'duration_secs': 0.405422} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.733916] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82415443-1884-4898-996e-828d23f67f23] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.734290] env[65503]: WARNING neutronclient.v2_0.client [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
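[editor's note] The records in this stretch of the log repeat one pattern: a vSphere task (CreateVM_Task, CloneVM_Task, MoveVirtualDisk_Task, ReconfigVM_Task, ...) is submitted, the request waits on it, and the poller logs "progress is N%." until the task reports "completed successfully." with a duration. The sketch below only illustrates that poll-until-done shape; it is not the oslo.vmware or Nova implementation, and fetch_task_info, the state names, the interval, and the timeout are assumptions made for the example.

    import time
    from typing import Any, Callable, Dict

    def wait_for_task(fetch_task_info: Callable[[], Dict[str, Any]],
                      interval: float = 0.5,
                      timeout: float = 300.0) -> Dict[str, Any]:
        """Poll a hypothetical task-info callable until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while True:
            # e.g. {'name': 'CloneVM_Task', 'state': 'running', 'progress': 94}
            info = fetch_task_info()
            state = info.get('state')
            if state == 'success':
                # analogue of: Task: {...} completed successfully.
                return info
            if state == 'error':
                raise RuntimeError(f"task {info.get('name')} failed: {info.get('error')}")
            # analogue of: Task: {...} progress is N%.
            print(f"Task {info.get('name')} progress is {info.get('progress', 0)}%.")
            if time.monotonic() > deadline:
                raise TimeoutError(f"task {info.get('name')} did not finish in {timeout}s")
            time.sleep(interval)

    # Tiny self-contained usage example with a fake task that finishes on the third poll.
    _states = iter([
        {'name': 'CloneVM_Task', 'state': 'running', 'progress': 0},
        {'name': 'CloneVM_Task', 'state': 'running', 'progress': 94},
        {'name': 'CloneVM_Task', 'state': 'success', 'progress': 100},
    ])
    print(wait_for_task(lambda: next(_states), interval=0.01))

The same loop shape explains why a single operation in this log produces several "progress is N%." lines before its "completed successfully." entry. [end editor's note]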
[ 974.736019] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.736019] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.736019] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 974.736019] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-961466eb-b20f-45b6-aeb9-02f0fc07c6c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.742630] env[65503]: INFO nova.compute.manager [-] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Took 2.10 seconds to deallocate network for instance. [ 974.747346] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 974.747346] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fc26a8-e02c-c140-073f-b486ba866316" [ 974.747346] env[65503]: _type = "Task" [ 974.747346] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.765545] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fc26a8-e02c-c140-073f-b486ba866316, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.883114] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450392, 'name': ReconfigVM_Task, 'duration_secs': 0.345833} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.884806] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Reconfigured VM instance instance-00000051 to attach disk [datastore2] b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14/b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 974.885847] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a066df2-0582-40c7-80ea-1c02583c7f9e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.898796] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for the task: (returnval){ [ 974.898796] env[65503]: value = "task-4450393" [ 974.898796] env[65503]: _type = "Task" [ 974.898796] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.916524] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450393, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.945646] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450388, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.121685] env[65503]: DEBUG nova.scheduler.client.report [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 975.210838] env[65503]: DEBUG nova.compute.manager [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 975.243205] env[65503]: DEBUG nova.virt.hardware [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 975.243205] env[65503]: DEBUG nova.virt.hardware [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 975.243902] env[65503]: DEBUG nova.virt.hardware [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 975.244609] env[65503]: DEBUG nova.virt.hardware [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 975.244892] env[65503]: DEBUG nova.virt.hardware [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 975.245150] env[65503]: DEBUG nova.virt.hardware [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 975.245456] env[65503]: DEBUG nova.virt.hardware [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 975.245699] env[65503]: DEBUG nova.virt.hardware [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 975.245967] env[65503]: DEBUG 
nova.virt.hardware [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 975.246180] env[65503]: DEBUG nova.virt.hardware [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 975.246438] env[65503]: DEBUG nova.virt.hardware [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 975.247607] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c380d50-628c-44ab-9953-102e3603def5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.261610] env[65503]: DEBUG oslo_concurrency.lockutils [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.264578] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5dc335-7e91-4f12-af94-51508101f2f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.280357] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fc26a8-e02c-c140-073f-b486ba866316, 'name': SearchDatastore_Task, 'duration_secs': 0.091137} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.281415] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.281415] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.282534] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.282534] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.285081] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.285081] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a821296-11e3-4e83-a790-2060b918112a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.316222] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.318460] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.318460] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b72601b3-c5d5-45a0-b058-4828eb4b64d1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.325302] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 975.325302] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fb7d6d-2377-6380-1a6a-dcecc1d1c94c" [ 975.325302] env[65503]: _type = "Task" [ 975.325302] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.337298] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fb7d6d-2377-6380-1a6a-dcecc1d1c94c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.379143] env[65503]: WARNING neutronclient.v2_0.client [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 975.379873] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 975.380275] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 975.411496] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450393, 'name': Rename_Task, 'duration_secs': 0.260671} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.412897] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 975.413698] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39c883c0-0c41-475f-ad9f-b716a1594df2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.422320] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for the task: (returnval){ [ 975.422320] env[65503]: value = "task-4450394" [ 975.422320] env[65503]: _type = "Task" [ 975.422320] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.433830] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450394, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.446679] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450388, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.636701] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.638413] env[65503]: DEBUG nova.compute.manager [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 975.643799] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.592s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.649035] env[65503]: INFO nova.compute.claims [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 975.746396] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 975.749251] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 975.797399] env[65503]: DEBUG nova.compute.manager [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] [instance: 82415443-1884-4898-996e-828d23f67f23] Received event network-vif-plugged-d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 975.797399] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Acquiring lock "82415443-1884-4898-996e-828d23f67f23-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.797399] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Lock "82415443-1884-4898-996e-828d23f67f23-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.797399] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Lock "82415443-1884-4898-996e-828d23f67f23-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.797399] env[65503]: DEBUG nova.compute.manager [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] [instance: 
82415443-1884-4898-996e-828d23f67f23] No waiting events found dispatching network-vif-plugged-d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 975.797399] env[65503]: WARNING nova.compute.manager [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] [instance: 82415443-1884-4898-996e-828d23f67f23] Received unexpected event network-vif-plugged-d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f for instance with vm_state building and task_state spawning. [ 975.797399] env[65503]: DEBUG nova.compute.manager [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] [instance: 82415443-1884-4898-996e-828d23f67f23] Received event network-changed-d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 975.797399] env[65503]: DEBUG nova.compute.manager [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] [instance: 82415443-1884-4898-996e-828d23f67f23] Refreshing instance network info cache due to event network-changed-d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 975.797961] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Acquiring lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.797961] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Acquired lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.798076] env[65503]: DEBUG nova.network.neutron [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] [instance: 82415443-1884-4898-996e-828d23f67f23] Refreshing network info cache for port d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 975.833175] env[65503]: WARNING neutronclient.v2_0.client [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 975.834199] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 975.834754] env[65503]: WARNING openstack [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 975.857066] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fb7d6d-2377-6380-1a6a-dcecc1d1c94c, 'name': SearchDatastore_Task, 'duration_secs': 0.094558} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.858103] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd5a47a0-a8b1-4636-b9eb-724e674321b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.868060] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 975.868060] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a08f33-4853-d375-77c1-c6d80cebe471" [ 975.868060] env[65503]: _type = "Task" [ 975.868060] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.880033] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a08f33-4853-d375-77c1-c6d80cebe471, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.937621] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450394, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.952357] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450388, 'name': MoveVirtualDisk_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.011509] env[65503]: DEBUG nova.compute.manager [req-a011d648-83e7-46e8-a32b-3e5642e3f0c6 req-7ff498b6-93ea-40e3-9e50-e261f851e5c8 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received event network-vif-plugged-995d778e-470e-4cfa-bb9e-b83d9780f51a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 976.011856] env[65503]: DEBUG oslo_concurrency.lockutils [req-a011d648-83e7-46e8-a32b-3e5642e3f0c6 req-7ff498b6-93ea-40e3-9e50-e261f851e5c8 service nova] Acquiring lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.012416] env[65503]: DEBUG oslo_concurrency.lockutils [req-a011d648-83e7-46e8-a32b-3e5642e3f0c6 req-7ff498b6-93ea-40e3-9e50-e261f851e5c8 service nova] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.012714] env[65503]: DEBUG oslo_concurrency.lockutils [req-a011d648-83e7-46e8-a32b-3e5642e3f0c6 req-7ff498b6-93ea-40e3-9e50-e261f851e5c8 service nova] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.012954] env[65503]: DEBUG nova.compute.manager [req-a011d648-83e7-46e8-a32b-3e5642e3f0c6 req-7ff498b6-93ea-40e3-9e50-e261f851e5c8 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] No waiting events found dispatching network-vif-plugged-995d778e-470e-4cfa-bb9e-b83d9780f51a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 976.013723] env[65503]: WARNING nova.compute.manager [req-a011d648-83e7-46e8-a32b-3e5642e3f0c6 req-7ff498b6-93ea-40e3-9e50-e261f851e5c8 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received unexpected event network-vif-plugged-995d778e-470e-4cfa-bb9e-b83d9780f51a for instance with vm_state active and task_state None. 
[ 976.017173] env[65503]: DEBUG nova.network.neutron [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updating instance_info_cache with network_info: [{"id": "a52973b6-14a8-480e-8ad4-92719252801c", "address": "fa:16:3e:73:61:b2", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa52973b6-14", "ovs_interfaceid": "a52973b6-14a8-480e-8ad4-92719252801c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "995d778e-470e-4cfa-bb9e-b83d9780f51a", "address": "fa:16:3e:15:00:96", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995d778e-47", "ovs_interfaceid": "995d778e-470e-4cfa-bb9e-b83d9780f51a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 976.156012] env[65503]: DEBUG nova.compute.utils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 976.158500] env[65503]: DEBUG nova.compute.manager [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 976.158500] env[65503]: DEBUG nova.network.neutron [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 976.158913] env[65503]: WARNING neutronclient.v2_0.client [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 976.159680] env[65503]: WARNING neutronclient.v2_0.client [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 976.160647] env[65503]: WARNING openstack [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 976.161181] env[65503]: WARNING openstack [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 976.171344] env[65503]: DEBUG nova.network.neutron [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Successfully updated port: c5073b44-93ec-4c66-9634-09aeaf801426 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 976.256303] env[65503]: DEBUG nova.policy [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e2c1b5740924a1fbccede1d48e95f85', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3ade9ce0dc44449bb7a3bf0c624e366', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 976.301811] env[65503]: WARNING neutronclient.v2_0.client [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed 
in a future release. [ 976.303385] env[65503]: WARNING openstack [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 976.304153] env[65503]: WARNING openstack [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 976.381575] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a08f33-4853-d375-77c1-c6d80cebe471, 'name': SearchDatastore_Task, 'duration_secs': 0.092631} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.382239] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.382904] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 82415443-1884-4898-996e-828d23f67f23/82415443-1884-4898-996e-828d23f67f23.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 976.383000] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59d2a6c7-27e8-4cb0-95b7-476b55ae7af1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.392457] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 976.392457] env[65503]: value = "task-4450395" [ 976.392457] env[65503]: _type = "Task" [ 976.392457] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.402967] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450395, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.437527] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450394, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.449357] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450388, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.83372} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.449357] env[65503]: INFO nova.virt.vmwareapi.ds_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_0648663f-48d0-412d-a71a-3c99f9dff3e5/OSTACK_IMG_0648663f-48d0-412d-a71a-3c99f9dff3e5.vmdk to [datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2.vmdk. [ 976.449606] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Cleaning up location [datastore2] OSTACK_IMG_0648663f-48d0-412d-a71a-3c99f9dff3e5 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 976.449766] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_0648663f-48d0-412d-a71a-3c99f9dff3e5 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 976.450141] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-380dbde1-a6d9-49a5-8f89-07ea2519c92d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.459484] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 976.459484] env[65503]: value = "task-4450396" [ 976.459484] env[65503]: _type = "Task" [ 976.459484] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.472480] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450396, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.520618] env[65503]: DEBUG oslo_concurrency.lockutils [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.521372] env[65503]: DEBUG oslo_concurrency.lockutils [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.521580] env[65503]: DEBUG oslo_concurrency.lockutils [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.522425] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e93978b-5b1a-4037-a975-445fde73fbca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.544028] env[65503]: DEBUG nova.virt.hardware [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 976.544357] env[65503]: DEBUG nova.virt.hardware [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 976.544548] env[65503]: DEBUG nova.virt.hardware [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 976.544782] env[65503]: DEBUG nova.virt.hardware [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 976.544935] env[65503]: DEBUG nova.virt.hardware [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:400}} [ 976.545127] env[65503]: DEBUG nova.virt.hardware [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 976.545351] env[65503]: DEBUG nova.virt.hardware [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 976.545533] env[65503]: DEBUG nova.virt.hardware [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 976.545703] env[65503]: DEBUG nova.virt.hardware [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 976.545885] env[65503]: DEBUG nova.virt.hardware [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 976.546070] env[65503]: DEBUG nova.virt.hardware [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 976.555054] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Reconfiguring VM to attach interface {{(pid=65503) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 976.555792] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51923b53-80af-4989-8f2c-e1004f246a90 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.578657] env[65503]: DEBUG oslo_vmware.api [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 976.578657] env[65503]: value = "task-4450397" [ 976.578657] env[65503]: _type = "Task" [ 976.578657] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.589243] env[65503]: DEBUG oslo_vmware.api [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450397, 'name': ReconfigVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.651135] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "a57486e1-82e3-48d5-99fe-c89b300a2136" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.651442] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.670609] env[65503]: DEBUG nova.network.neutron [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Successfully created port: 21904121-6624-489f-b851-76b0dfc15641 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 976.673700] env[65503]: DEBUG nova.compute.manager [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 976.679319] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquiring lock "refresh_cache-a043a8e2-8661-4d80-939d-8e7b02b0459f" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.679466] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquired lock "refresh_cache-a043a8e2-8661-4d80-939d-8e7b02b0459f" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.679635] env[65503]: DEBUG nova.network.neutron [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 976.796396] env[65503]: DEBUG oslo_concurrency.lockutils [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.796833] env[65503]: DEBUG oslo_concurrency.lockutils [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.797282] env[65503]: DEBUG oslo_concurrency.lockutils [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.797620] env[65503]: DEBUG oslo_concurrency.lockutils [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.797884] env[65503]: DEBUG oslo_concurrency.lockutils [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.801666] env[65503]: INFO nova.compute.manager [None req-13d33121-87b6-4fc8-8f62-5987575c512f 
tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Terminating instance [ 976.907033] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450395, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.936799] env[65503]: DEBUG oslo_vmware.api [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450394, 'name': PowerOnVM_Task, 'duration_secs': 1.482275} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.937127] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 976.937343] env[65503]: INFO nova.compute.manager [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Took 9.51 seconds to spawn the instance on the hypervisor. [ 976.937521] env[65503]: DEBUG nova.compute.manager [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 976.938493] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba126a2-d3f8-4811-8964-f2f614e562ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.943635] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb576d1e-0696-4476-bbbd-286e0ac65426 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.954772] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4af9a9-7745-4c2b-8b96-0dbde7424a6b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.000264] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46684e48-b2c8-4ee9-ae15-e0b4c4d097a3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.007861] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450396, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077776} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.010710] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.010710] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.011674] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2.vmdk to [datastore2] ff256d3f-af88-4f01-bdfd-cf89e06ab364/ff256d3f-af88-4f01-bdfd-cf89e06ab364.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 977.012148] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-016fd0f6-510e-49f0-a00b-bfb8283d7792 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.019455] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca3533b-8c8a-4092-81c5-bbd56680f12c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.037588] env[65503]: DEBUG nova.compute.provider_tree [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.043176] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 977.043176] env[65503]: value = "task-4450398" [ 977.043176] env[65503]: _type = "Task" [ 977.043176] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.066659] env[65503]: WARNING openstack [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 977.067147] env[65503]: WARNING openstack [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 977.091489] env[65503]: DEBUG oslo_vmware.api [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450397, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.155853] env[65503]: INFO nova.compute.manager [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Detaching volume 23bf6f56-0021-4fa7-800f-3751b9a663f9 [ 977.184161] env[65503]: WARNING openstack [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 977.184612] env[65503]: WARNING openstack [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 977.198540] env[65503]: INFO nova.virt.block_device [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Attempting to driver detach volume 23bf6f56-0021-4fa7-800f-3751b9a663f9 from mountpoint /dev/sdb [ 977.198925] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Volume detach. 
Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 977.199259] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870370', 'volume_id': '23bf6f56-0021-4fa7-800f-3751b9a663f9', 'name': 'volume-23bf6f56-0021-4fa7-800f-3751b9a663f9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a57486e1-82e3-48d5-99fe-c89b300a2136', 'attached_at': '', 'detached_at': '', 'volume_id': '23bf6f56-0021-4fa7-800f-3751b9a663f9', 'serial': '23bf6f56-0021-4fa7-800f-3751b9a663f9'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 977.200651] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e96943c-54f5-4ec6-985d-86c6434557ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.229761] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cb0659-7d56-4fef-903f-930065dc798e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.240786] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73c6c59-0c98-4127-a906-704834b37204 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.267622] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11f17f7-fc03-428b-a806-579e57912d71 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.289379] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The volume has not been displaced from its original location: [datastore1] volume-23bf6f56-0021-4fa7-800f-3751b9a663f9/volume-23bf6f56-0021-4fa7-800f-3751b9a663f9.vmdk. No consolidation needed. {{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 977.296028] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Reconfiguring VM instance instance-00000034 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 977.296413] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c5abba0-386b-48f5-b601-d6c193367a09 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.316318] env[65503]: DEBUG nova.compute.manager [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 977.316639] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 977.317722] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d15d4a-0334-487f-a8c8-34675bae8228 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.331438] env[65503]: DEBUG oslo_vmware.api [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 977.331438] env[65503]: value = "task-4450399" [ 977.331438] env[65503]: _type = "Task" [ 977.331438] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.331835] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 977.332538] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6de303d-5101-469a-b86d-46e34a2257c7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.346971] env[65503]: DEBUG oslo_vmware.api [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450399, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.406238] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450395, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658516} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.406592] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 82415443-1884-4898-996e-828d23f67f23/82415443-1884-4898-996e-828d23f67f23.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 977.407369] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 977.407369] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74989a80-5e8b-46fd-a1f4-a22a81333569 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.413563] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 977.413885] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 977.414106] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleting the datastore file [datastore1] 3b31611f-84f9-4bf6-8e26-f64db06d15ed {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 977.414431] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7adf287e-b8e2-4f98-8bf0-3d11f4dd14ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.418652] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 977.418652] env[65503]: value = "task-4450401" [ 977.418652] env[65503]: _type = "Task" [ 977.418652] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.426414] env[65503]: DEBUG oslo_vmware.api [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 977.426414] env[65503]: value = "task-4450402" [ 977.426414] env[65503]: _type = "Task" [ 977.426414] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.441709] env[65503]: DEBUG oslo_vmware.api [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450402, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.469978] env[65503]: INFO nova.compute.manager [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Took 27.01 seconds to build instance. [ 977.543035] env[65503]: DEBUG nova.scheduler.client.report [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 977.559388] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450398, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.594855] env[65503]: DEBUG oslo_vmware.api [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450397, 'name': ReconfigVM_Task, 'duration_secs': 0.975232} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.595824] env[65503]: WARNING neutronclient.v2_0.client [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
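
Note on the scheduler report lines above: the report client compares the compute node's current inventory against what Placement already holds for provider 988ff85a-1d12-41bb-a369-e298e8491ca1, per resource class (VCPU, MEMORY_MB, DISK_GB) with total, reserved, min/max_unit, step_size and allocation_ratio. As a minimal plain-Python sketch (not Nova's report-client code; the numbers are copied from the logged inventory), the schedulable capacity per class is (total - reserved) * allocation_ratio, and "inventory has not changed" is simply a field-by-field comparison:

    # Sketch: interpret a Placement-style inventory dict like the one logged above.
    INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        """Schedulable capacity per resource class: (total - reserved) * ratio."""
        return {rc: (f['total'] - f['reserved']) * f['allocation_ratio']
                for rc, f in inv.items()}

    def inventory_changed(old, new):
        """True if any resource class or field differs (what the log line checks)."""
        return old != new

    print(effective_capacity(INVENTORY))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}

When the comparison comes back equal, the report client skips the PUT to Placement, which is exactly the "Inventory has not changed" message seen here.
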
[ 977.596124] env[65503]: DEBUG oslo_concurrency.lockutils [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.596324] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Reconfigured VM to attach interface {{(pid=65503) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 977.695040] env[65503]: DEBUG nova.compute.manager [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 977.726215] env[65503]: DEBUG nova.virt.hardware [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 977.726633] env[65503]: DEBUG nova.virt.hardware [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 977.726789] env[65503]: DEBUG nova.virt.hardware [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 977.726992] env[65503]: DEBUG nova.virt.hardware [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 977.727255] env[65503]: DEBUG nova.virt.hardware [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 977.727432] env[65503]: DEBUG nova.virt.hardware [None 
req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 977.727735] env[65503]: DEBUG nova.virt.hardware [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 977.727923] env[65503]: DEBUG nova.virt.hardware [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 977.728148] env[65503]: DEBUG nova.virt.hardware [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 977.728600] env[65503]: DEBUG nova.virt.hardware [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 977.728600] env[65503]: DEBUG nova.virt.hardware [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 977.729739] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf56e57-3992-415d-94a6-0c645e94e018 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.740658] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5257a5-ff09-4bdc-ab5e-0c247e174906 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.845544] env[65503]: DEBUG oslo_vmware.api [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450399, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.932091] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450401, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.942267] env[65503]: DEBUG oslo_vmware.api [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450402, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.972656] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51fedf11-7e50-4069-aca8-a6a2fe42bc24 tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.524s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.015033] env[65503]: DEBUG nova.network.neutron [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 978.054308] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.054611] env[65503]: DEBUG nova.compute.manager [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 978.059582] env[65503]: DEBUG oslo_concurrency.lockutils [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.798s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.059923] env[65503]: DEBUG nova.objects.instance [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lazy-loading 'resources' on Instance uuid 8962b1b8-4875-4a1a-b231-36385755a976 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 978.069054] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450398, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.101753] env[65503]: DEBUG oslo_concurrency.lockutils [None req-af5a2a7f-8324-44cb-8cc5-82951deb9830 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.461s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.345336] env[65503]: DEBUG oslo_vmware.api [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450399, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.432034] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.639787} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.432734] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 978.438111] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffdc79d-b688-4c7f-a22f-1b0c674d7969 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.450626] env[65503]: DEBUG oslo_vmware.api [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450402, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.94748} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.460178] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 978.460178] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 978.460178] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 978.460178] env[65503]: INFO nova.compute.manager [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Took 1.14 seconds to destroy the instance on the hypervisor. [ 978.460414] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 978.470060] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 82415443-1884-4898-996e-828d23f67f23/82415443-1884-4898-996e-828d23f67f23.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 978.470379] env[65503]: DEBUG nova.compute.manager [-] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 978.470507] env[65503]: DEBUG nova.network.neutron [-] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 978.470753] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
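
After the guest for 3b31611f-84f9-4bf6-8e26-f64db06d15ed is destroyed, the log shows a looping call waiting for _deallocate_network_with_retries to return before the Neutron ports are released. The sketch below is a hedged, standard-library-only illustration of that retry-until-done shape (the real code path uses oslo.service's looping-call machinery and real Neutron calls; the interval, attempt count and the deallocate_network placeholder are assumptions for illustration):

    import time

    def retry(func, attempts=3, interval=1.0, backoff=2.0):
        # Keep invoking func until it succeeds or attempts run out,
        # backing off between tries.
        delay = interval
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except Exception as exc:  # the real code catches specific errors
                if attempt == attempts:
                    raise
                print(f"attempt {attempt} failed ({exc}); retrying in {delay:.1f}s")
                time.sleep(delay)
                delay *= backoff

    def deallocate_network():
        """Hypothetical stand-in for the per-instance Neutron cleanup."""
        print("deallocating ports for instance 3b31611f-84f9-4bf6-8e26-f64db06d15ed")

    retry(deallocate_network)

The point of the pattern is that a transient Neutron failure does not leave ports leaked; the cleanup is simply attempted again on the next pass.
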
[ 978.471358] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 978.471591] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 978.478606] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aad9bc3b-15d4-40cd-b010-757feb73034c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.502130] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 978.502130] env[65503]: value = "task-4450403" [ 978.502130] env[65503]: _type = "Task" [ 978.502130] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.516000] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450403, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.560394] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450398, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.564666] env[65503]: DEBUG nova.compute.utils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 978.566399] env[65503]: DEBUG nova.compute.manager [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 978.566668] env[65503]: DEBUG nova.network.neutron [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 978.567063] env[65503]: WARNING neutronclient.v2_0.client [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
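
The recurring "Disabling service 'block-storage' / 'key-manager'" warnings are oslo.config raising NoSuchOptError: this process has no valid_interfaces option registered under the [cinder] and [barbican] groups, so the SDK-side config processing gives up on those services. A small, hedged sketch of the mechanism (illustrative only; in Nova these options are normally registered via keystoneauth's adapter-loading helpers rather than by hand) shows the same error and how registration makes the lookup succeed:

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))

    # Reading an option that was never registered raises NoSuchOptError,
    # which is exactly what the warnings above report for [cinder]/[barbican].
    try:
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        print('unregistered:', exc)

    # Once the option is registered for the group, the same lookup succeeds.
    conf.register_opts(
        [cfg.ListOpt('valid_interfaces', default=['internal', 'public'])],
        group='cinder')
    conf(args=[])
    print('registered:', conf.cinder.valid_interfaces)
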
[ 978.567487] env[65503]: WARNING neutronclient.v2_0.client [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 978.568106] env[65503]: WARNING openstack [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 978.568483] env[65503]: WARNING openstack [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 978.583431] env[65503]: DEBUG nova.network.neutron [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Successfully updated port: 21904121-6624-489f-b851-76b0dfc15641 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 978.612097] env[65503]: WARNING neutronclient.v2_0.client [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
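
The neutronclient warnings repeated throughout this run note that the Python bindings are deprecated in favour of openstacksdk. As a rough, hedged illustration of the replacement (the cloud name 'devstack' is an assumption, the port ID is taken from the "Successfully updated port" line above for flavour, and Nova's actual call sites differ), the equivalent SDK usage looks like this:

    import openstack

    # Connect using a clouds.yaml entry; 'devstack' is an assumed cloud name.
    conn = openstack.connect(cloud='devstack')

    # Inspect the port the log reports as successfully updated.
    port = conn.network.get_port('21904121-6624-489f-b851-76b0dfc15641')
    print(port.status, port.fixed_ips)

    # Updates go through the same network proxy instead of neutronclient.
    conn.network.update_port(port, description='updated via openstacksdk')
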
[ 978.612856] env[65503]: WARNING openstack [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 978.613203] env[65503]: WARNING openstack [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 978.676440] env[65503]: WARNING openstack [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 978.677054] env[65503]: WARNING openstack [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 978.750039] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 978.801022] env[65503]: DEBUG nova.policy [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e2c1b5740924a1fbccede1d48e95f85', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3ade9ce0dc44449bb7a3bf0c624e366', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 978.856922] env[65503]: DEBUG oslo_vmware.api [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450399, 'name': ReconfigVM_Task, 'duration_secs': 1.069213} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.858699] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Reconfigured VM instance instance-00000034 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 978.864919] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247cbbbc-b528-4954-a572-1727d49cf130 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.873814] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99ea2520-7d05-4da5-a8c3-1df0c1b405ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.894928] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314764cb-0914-4082-a695-972233ed50b7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.900165] env[65503]: DEBUG oslo_vmware.api [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 978.900165] env[65503]: value = "task-4450404" [ 978.900165] env[65503]: _type = "Task" [ 978.900165] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.944065] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8ace4a-b097-4e4b-9ef6-db940dc4201e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.947683] env[65503]: DEBUG oslo_vmware.api [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450404, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.954531] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38127e37-ee97-45a5-bdb2-9d9e14c6d207 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.973866] env[65503]: DEBUG nova.compute.provider_tree [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.017884] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450403, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.063993] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450398, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.083556] env[65503]: DEBUG nova.compute.manager [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 979.090871] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.091085] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.091236] env[65503]: DEBUG nova.network.neutron [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 979.316648] env[65503]: DEBUG nova.network.neutron [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] [instance: 82415443-1884-4898-996e-828d23f67f23] Updated VIF entry in instance network info cache for port d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 979.317048] env[65503]: DEBUG nova.network.neutron [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance_info_cache with network_info: [{"id": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "address": "fa:16:3e:6c:9a:41", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e6ffe6-e1", "ovs_interfaceid": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 979.346374] env[65503]: WARNING neutronclient.v2_0.client [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
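
The instance_info_cache entry above is the full network_info structure Nova caches per instance: a list of VIFs, each carrying the Neutron port ID, MAC address, device name and a nested network/subnets/ips layout. A short plain-Python sketch of pulling the useful bits (fixed IPs, MAC, MTU) out of that shape, using an abbreviated copy of the values shown in the cache entry:

    # Sketch: walk a cached network_info entry like the one logged above.
    network_info = [{
        "id": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f",
        "address": "fa:16:3e:6c:9a:41",
        "type": "ovs",
        "devname": "tapd2e6ffe6-e1",
        "network": {
            "label": "tempest-ServerDiskConfigTestJSON-1224151961-network",
            "meta": {"mtu": 8950},
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.3", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(vif["id"], vif["address"], vif["network"]["meta"]["mtu"], ips)
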
[ 979.347094] env[65503]: WARNING openstack [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 979.347946] env[65503]: WARNING openstack [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 979.401676] env[65503]: DEBUG nova.network.neutron [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Successfully created port: 6c742bed-a172-46d4-9ac0-cc3025ddff75 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 979.416671] env[65503]: DEBUG oslo_vmware.api [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450404, 'name': ReconfigVM_Task, 'duration_secs': 0.488535} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.417030] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870370', 'volume_id': '23bf6f56-0021-4fa7-800f-3751b9a663f9', 'name': 'volume-23bf6f56-0021-4fa7-800f-3751b9a663f9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a57486e1-82e3-48d5-99fe-c89b300a2136', 'attached_at': '', 'detached_at': '', 'volume_id': '23bf6f56-0021-4fa7-800f-3751b9a663f9', 'serial': '23bf6f56-0021-4fa7-800f-3751b9a663f9'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 979.479647] env[65503]: DEBUG nova.scheduler.client.report [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 979.492968] env[65503]: DEBUG nova.network.neutron [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 
tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Updating instance_info_cache with network_info: [{"id": "c5073b44-93ec-4c66-9634-09aeaf801426", "address": "fa:16:3e:1a:7f:ab", "network": {"id": "3025296c-bbb8-421f-bad6-0ffc2ad2e087", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-772964545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34a784de8c744d988015e75cf157103e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5073b44-93", "ovs_interfaceid": "c5073b44-93ec-4c66-9634-09aeaf801426", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 979.523086] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450403, 'name': ReconfigVM_Task, 'duration_secs': 0.523963} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.523698] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 82415443-1884-4898-996e-828d23f67f23/82415443-1884-4898-996e-828d23f67f23.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 979.525323] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0176c82-a979-4d15-8789-2a71bbde805d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.538035] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 979.538035] env[65503]: value = "task-4450405" [ 979.538035] env[65503]: _type = "Task" [ 979.538035] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.551598] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450405, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.569341] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450398, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.523048} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.569842] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2/b21b686c-8154-4b2b-9ac4-03e9bcfc50c2.vmdk to [datastore2] ff256d3f-af88-4f01-bdfd-cf89e06ab364/ff256d3f-af88-4f01-bdfd-cf89e06ab364.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 979.572032] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4dad6b-99bc-41fb-92d5-65f912d30879 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.598757] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] ff256d3f-af88-4f01-bdfd-cf89e06ab364/ff256d3f-af88-4f01-bdfd-cf89e06ab364.vmdk or device None with type streamOptimized {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 979.604581] env[65503]: WARNING openstack [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 979.605592] env[65503]: WARNING openstack [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 979.613180] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23bfc49d-ebfb-4758-b156-23a4011bca39 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.638593] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 979.638593] env[65503]: value = "task-4450406" [ 979.638593] env[65503]: _type = "Task" [ 979.638593] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.650045] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450406, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.677964] env[65503]: DEBUG nova.network.neutron [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 979.763687] env[65503]: DEBUG nova.network.neutron [-] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 979.769512] env[65503]: WARNING openstack [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 979.769954] env[65503]: WARNING openstack [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 979.820501] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f909628-49bb-49da-847c-3f0556ffb69e req-70cdf85f-f0d2-492b-80ee-965f538cd55f service nova] Releasing lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 979.913963] env[65503]: WARNING neutronclient.v2_0.client [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 979.914729] env[65503]: WARNING openstack [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 979.915123] env[65503]: WARNING openstack [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 979.978369] env[65503]: DEBUG nova.objects.instance [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lazy-loading 'flavor' on Instance uuid a57486e1-82e3-48d5-99fe-c89b300a2136 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.990661] env[65503]: DEBUG oslo_concurrency.lockutils [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.930s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.004879] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Releasing lock "refresh_cache-a043a8e2-8661-4d80-939d-8e7b02b0459f" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.005278] env[65503]: DEBUG nova.compute.manager [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Instance network_info: |[{"id": "c5073b44-93ec-4c66-9634-09aeaf801426", "address": "fa:16:3e:1a:7f:ab", "network": {"id": "3025296c-bbb8-421f-bad6-0ffc2ad2e087", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-772964545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34a784de8c744d988015e75cf157103e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5073b44-93", "ovs_interfaceid": "c5073b44-93ec-4c66-9634-09aeaf801426", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 980.005969] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:7f:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5073b44-93ec-4c66-9634-09aeaf801426', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.015315] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Creating folder: Project (34a784de8c744d988015e75cf157103e). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 980.016504] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee876b3d-86bf-4c23-9b9b-8762366240ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.020090] env[65503]: INFO nova.scheduler.client.report [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Deleted allocations for instance 8962b1b8-4875-4a1a-b231-36385755a976 [ 980.032926] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Created folder: Project (34a784de8c744d988015e75cf157103e) in parent group-v870190. [ 980.032926] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Creating folder: Instances. Parent ref: group-v870429. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 980.032926] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee96af0e-e3bd-4b76-8f14-cda1decacc27 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.043992] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Created folder: Instances in parent group-v870429. [ 980.044631] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 980.048286] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 980.049075] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450405, 'name': Rename_Task, 'duration_secs': 0.207278} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.049307] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-533b6de2-450d-44e1-ab1f-556b9fdeaf8b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.065251] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 980.065686] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.065942] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.066163] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.066345] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.066508] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 980.068287] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2de6f14-874b-49da-8dfb-73bb05205a86 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 980.070194] env[65503]: INFO nova.compute.manager [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Terminating instance
[ 980.078285] env[65503]: DEBUG nova.network.neutron [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Updating instance_info_cache with network_info: [{"id": "21904121-6624-489f-b851-76b0dfc15641", "address": "fa:16:3e:cd:01:68", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21904121-66", "ovs_interfaceid": "21904121-6624-489f-b851-76b0dfc15641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}}
[ 980.085920] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){
[ 980.085920] env[65503]: value = "task-4450409"
[ 980.085920] env[65503]: _type = "Task"
[ 980.085920] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 980.086187] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 980.086187] env[65503]: value = "task-4450410"
[ 980.086187] env[65503]: _type = "Task"
[ 980.086187] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 980.105122] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450409, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 980.107967] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450410, 'name': CreateVM_Task} progress is 6%.
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.132781] env[65503]: DEBUG nova.compute.manager [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 980.151973] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450406, 'name': ReconfigVM_Task, 'duration_secs': 0.30225} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.152551] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Reconfigured VM instance instance-0000002f to attach disk [datastore2] ff256d3f-af88-4f01-bdfd-cf89e06ab364/ff256d3f-af88-4f01-bdfd-cf89e06ab364.vmdk or device None with type streamOptimized {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 980.153530] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09c96f05-6ec0-418e-b2e9-2f44a1adea57 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.161342] env[65503]: DEBUG nova.compute.manager [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Received event network-vif-deleted-53fe187f-f12f-40ad-94fa-75bb9f0182ed {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 980.161596] env[65503]: DEBUG nova.compute.manager [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Received event network-vif-plugged-c5073b44-93ec-4c66-9634-09aeaf801426 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 980.161911] env[65503]: DEBUG oslo_concurrency.lockutils [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Acquiring lock "a043a8e2-8661-4d80-939d-8e7b02b0459f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.162231] env[65503]: DEBUG oslo_concurrency.lockutils [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Lock "a043a8e2-8661-4d80-939d-8e7b02b0459f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.162426] env[65503]: DEBUG oslo_concurrency.lockutils [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Lock "a043a8e2-8661-4d80-939d-8e7b02b0459f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.162637] env[65503]: DEBUG nova.compute.manager [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] No waiting events found dispatching network-vif-plugged-c5073b44-93ec-4c66-9634-09aeaf801426 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 980.162843] env[65503]: WARNING nova.compute.manager [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Received unexpected event network-vif-plugged-c5073b44-93ec-4c66-9634-09aeaf801426 for instance with vm_state building and task_state spawning. [ 980.163101] env[65503]: DEBUG nova.compute.manager [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Received event network-changed-c5073b44-93ec-4c66-9634-09aeaf801426 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 980.163304] env[65503]: DEBUG nova.compute.manager [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Refreshing instance network info cache due to event network-changed-c5073b44-93ec-4c66-9634-09aeaf801426. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 980.163544] env[65503]: DEBUG oslo_concurrency.lockutils [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Acquiring lock "refresh_cache-a043a8e2-8661-4d80-939d-8e7b02b0459f" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.163677] env[65503]: DEBUG oslo_concurrency.lockutils [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Acquired lock "refresh_cache-a043a8e2-8661-4d80-939d-8e7b02b0459f" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.163828] env[65503]: DEBUG nova.network.neutron [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Refreshing network info cache for port c5073b44-93ec-4c66-9634-09aeaf801426 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 980.170162] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 980.170162] env[65503]: value = "task-4450411" [ 980.170162] env[65503]: _type = "Task" [ 980.170162] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.182048] env[65503]: DEBUG nova.virt.hardware [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 980.182312] env[65503]: DEBUG nova.virt.hardware [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 980.182460] env[65503]: DEBUG nova.virt.hardware [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 980.182685] env[65503]: DEBUG nova.virt.hardware [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 980.182764] env[65503]: DEBUG nova.virt.hardware [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 980.183251] env[65503]: DEBUG nova.virt.hardware [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 980.183251] env[65503]: DEBUG nova.virt.hardware [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 980.183251] env[65503]: DEBUG nova.virt.hardware [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 980.183551] env[65503]: DEBUG nova.virt.hardware [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 980.183623] env[65503]: DEBUG nova.virt.hardware [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 980.183788] env[65503]: DEBUG nova.virt.hardware [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 980.185167] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699a8911-c407-4c6d-9f6f-1de5b55694be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.193575] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450411, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.197697] env[65503]: DEBUG nova.compute.manager [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received event network-changed-995d778e-470e-4cfa-bb9e-b83d9780f51a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 980.197889] env[65503]: DEBUG nova.compute.manager [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Refreshing instance network info cache due to event network-changed-995d778e-470e-4cfa-bb9e-b83d9780f51a. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 980.198118] env[65503]: DEBUG oslo_concurrency.lockutils [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] Acquiring lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.198256] env[65503]: DEBUG oslo_concurrency.lockutils [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] Acquired lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.198405] env[65503]: DEBUG nova.network.neutron [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Refreshing network info cache for port 995d778e-470e-4cfa-bb9e-b83d9780f51a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 980.204172] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739ff18e-e094-4f46-9b39-005b164aeacd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.269133] env[65503]: INFO nova.compute.manager [-] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Took 1.80 seconds to deallocate network for instance. [ 980.530070] env[65503]: DEBUG oslo_concurrency.lockutils [None req-300f9007-f3f9-438b-85fb-137273be053f tempest-ImagesOneServerNegativeTestJSON-493093067 tempest-ImagesOneServerNegativeTestJSON-493093067-project-member] Lock "8962b1b8-4875-4a1a-b231-36385755a976" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.657s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.585962] env[65503]: DEBUG nova.compute.manager [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 980.586217] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 980.587197] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1f95ab-5892-4def-ab9b-4ea88cef6ead {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.590493] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.590934] env[65503]: DEBUG nova.compute.manager [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Instance network_info: |[{"id": "21904121-6624-489f-b851-76b0dfc15641", "address": "fa:16:3e:cd:01:68", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21904121-66", "ovs_interfaceid": "21904121-6624-489f-b851-76b0dfc15641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 980.591506] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:01:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '67921bdb-a7a0-46b5-ba05-ca997496e222', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '21904121-6624-489f-b851-76b0dfc15641', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.599233] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Creating folder: 
Project (d3ade9ce0dc44449bb7a3bf0c624e366). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 980.606886] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ce8bf73-a1ff-4197-b05b-8732bcc81772 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.612535] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 980.619960] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8369058-4e29-45f1-bf96-299704a1a072 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.621696] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450410, 'name': CreateVM_Task, 'duration_secs': 0.470732} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.621974] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450409, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.623339] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 980.623659] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Created folder: Project (d3ade9ce0dc44449bb7a3bf0c624e366) in parent group-v870190. [ 980.623866] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Creating folder: Instances. Parent ref: group-v870432. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 980.624926] env[65503]: WARNING neutronclient.v2_0.client [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 980.625371] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.625553] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.625954] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 980.626211] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c849d74-3c3f-4acd-a4b0-659c58c41c7f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.627896] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89620cd8-e411-4b5b-be14-9dbac052c724 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.633219] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for the task: (returnval){ [ 980.633219] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c4e221-1252-b2ae-1bb3-e93b93349294" [ 980.633219] env[65503]: _type = "Task" [ 980.633219] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.639622] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Created folder: Instances in parent group-v870432. [ 980.639922] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 980.640532] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e37758cc-7287-4271-ad47-d711201d0add] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 980.640747] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be8e67f7-5f08-4f99-a6f5-8d859abb3ed9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.661405] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c4e221-1252-b2ae-1bb3-e93b93349294, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.668491] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 980.668491] env[65503]: value = "task-4450415" [ 980.668491] env[65503]: _type = "Task" [ 980.668491] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.672963] env[65503]: WARNING neutronclient.v2_0.client [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 980.673886] env[65503]: WARNING openstack [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 980.674256] env[65503]: WARNING openstack [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 980.694322] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450411, 'name': Rename_Task, 'duration_secs': 0.157868} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.699747] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 980.700270] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450415, 'name': CreateVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 980.701294] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48341b51-b7f2-4b91-9a58-791b0c42066d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 980.704782] env[65503]: WARNING neutronclient.v2_0.client [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.
[ 980.705463] env[65503]: WARNING openstack [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder]
[ 980.705805] env[65503]: WARNING openstack [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican]
[ 980.714224] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 980.714520] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 980.714839] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleting the datastore file [datastore1] 81e27e9b-7d7e-4b04-8257-268660fd9ec3 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 980.715896] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df007ee6-6ace-47d0-8b0a-205f8b519077 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 980.722140] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){
[ 980.722140] env[65503]: value = "task-4450416"
[ 980.722140] env[65503]: _type = "Task"
[ 980.722140] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 980.728776] env[65503]: DEBUG oslo_vmware.api [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){
[ 980.728776] env[65503]: value = "task-4450417"
[ 980.728776] env[65503]: _type = "Task"
[ 980.728776] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 980.735213] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450416, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 980.744972] env[65503]: DEBUG oslo_vmware.api [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450417, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 980.779417] env[65503]: DEBUG oslo_concurrency.lockutils [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 980.779682] env[65503]: DEBUG oslo_concurrency.lockutils [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 980.780252] env[65503]: DEBUG nova.objects.instance [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lazy-loading 'resources' on Instance uuid 3b31611f-84f9-4bf6-8e26-f64db06d15ed {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 980.896496] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 980.896872] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 980.986763] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fd949dd-674e-4a3f-b66b-650dd1bc5226 tempest-AttachVolumeNegativeTest-462648042
tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.335s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.107392] env[65503]: DEBUG oslo_vmware.api [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450409, 'name': PowerOnVM_Task, 'duration_secs': 0.735502} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.107738] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 981.108406] env[65503]: INFO nova.compute.manager [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Took 11.24 seconds to spawn the instance on the hypervisor. [ 981.108406] env[65503]: DEBUG nova.compute.manager [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 981.109595] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3446ca2-b3a5-400a-afae-d7de32c5b16d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.114601] env[65503]: DEBUG nova.network.neutron [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Successfully updated port: 6c742bed-a172-46d4-9ac0-cc3025ddff75 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 981.154744] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c4e221-1252-b2ae-1bb3-e93b93349294, 'name': SearchDatastore_Task, 'duration_secs': 0.028704} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.155184] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.156035] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 981.156035] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.156035] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.156217] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 981.156595] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58235404-e4bb-4ed7-a72b-c5153bb2517e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.176558] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 981.177348] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 981.178743] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4b6d981-516b-4f19-8b29-d4a7133091ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.183723] env[65503]: WARNING openstack [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 981.184511] env[65503]: WARNING openstack [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 981.196438] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450415, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.198479] env[65503]: WARNING openstack [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 981.199028] env[65503]: WARNING openstack [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 981.214708] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for the task: (returnval){ [ 981.214708] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]522b94b4-588c-ef34-f3cd-a828feb0c082" [ 981.214708] env[65503]: _type = "Task" [ 981.214708] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.235470] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522b94b4-588c-ef34-f3cd-a828feb0c082, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.245883] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450416, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.250485] env[65503]: DEBUG oslo_vmware.api [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450417, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26777} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.251185] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 981.251393] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 981.251493] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 981.251765] env[65503]: INFO nova.compute.manager [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Took 0.67 seconds to destroy the instance on the hypervisor. [ 981.252068] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 981.252696] env[65503]: DEBUG nova.compute.manager [-] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 981.252824] env[65503]: DEBUG nova.network.neutron [-] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 981.253088] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
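The repeated "Disabling service 'block-storage'/'key-manager'" warnings above come from openstacksdk asking oslo.config for an option (valid_interfaces) that was never registered in the [cinder] and [barbican] groups of this nova.conf. A minimal, self-contained oslo.config sketch of that failure mode follows; the option and group registration shown here are illustrative, not Nova's actual config surface.

# Minimal sketch (not Nova code): reading an option that was never registered
# in a group raises oslo_config.cfg.NoSuchOptError, which is exactly what the
# "no such option valid_interfaces in group [cinder]" warnings above report.
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_opts([cfg.StrOpt('catalog_info')], group='cinder')  # illustrative option
conf([], project='demo')

print(conf.cinder.catalog_info)      # registered -> returns its default (None here)
try:
    conf.cinder.valid_interfaces     # never registered in [cinder]
except cfg.NoSuchOptError as exc:
    print(exc)                       # no such option valid_interfaces in group [cinder]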
[ 981.253700] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 981.253975] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 981.350059] env[65503]: WARNING neutronclient.v2_0.client [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 981.350809] env[65503]: WARNING openstack [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 981.351235] env[65503]: WARNING openstack [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 981.381776] env[65503]: WARNING neutronclient.v2_0.client [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 981.382610] env[65503]: WARNING openstack [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 981.382973] env[65503]: WARNING openstack [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 981.394653] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 981.399562] env[65503]: DEBUG nova.compute.manager [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 981.490279] env[65503]: DEBUG nova.network.neutron [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Updated VIF entry in instance network info cache for port c5073b44-93ec-4c66-9634-09aeaf801426. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 981.490652] env[65503]: DEBUG nova.network.neutron [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Updating instance_info_cache with network_info: [{"id": "c5073b44-93ec-4c66-9634-09aeaf801426", "address": "fa:16:3e:1a:7f:ab", "network": {"id": "3025296c-bbb8-421f-bad6-0ffc2ad2e087", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-772964545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34a784de8c744d988015e75cf157103e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5073b44-93", "ovs_interfaceid": "c5073b44-93ec-4c66-9634-09aeaf801426", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 981.525343] env[65503]: DEBUG nova.network.neutron [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updated VIF entry in instance network info cache for port 995d778e-470e-4cfa-bb9e-b83d9780f51a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 981.525504] env[65503]: DEBUG nova.network.neutron [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updating instance_info_cache with network_info: [{"id": "a52973b6-14a8-480e-8ad4-92719252801c", "address": "fa:16:3e:73:61:b2", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa52973b6-14", "ovs_interfaceid": "a52973b6-14a8-480e-8ad4-92719252801c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "995d778e-470e-4cfa-bb9e-b83d9780f51a", "address": "fa:16:3e:15:00:96", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995d778e-47", "ovs_interfaceid": "995d778e-470e-4cfa-bb9e-b83d9780f51a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 981.562701] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff77d39-d132-46cd-84c0-65e313b44ddc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.573601] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3affdb83-e0e0-45f9-8943-dc4230adac27 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.610169] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4563e9eb-f383-4e87-8ad7-431812cd979c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.618263] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "refresh_cache-ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.619014] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "refresh_cache-ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.619014] env[65503]: DEBUG nova.network.neutron [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 981.621389] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e481b4f-35eb-4bd6-97dd-eac952fecc5b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.628716] env[65503]: WARNING openstack [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 981.629470] env[65503]: WARNING openstack [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 981.653952] env[65503]: DEBUG nova.compute.provider_tree [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.657067] env[65503]: INFO nova.compute.manager [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Took 25.09 seconds to build instance. [ 981.674470] env[65503]: DEBUG nova.network.neutron [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 981.687609] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450415, 'name': CreateVM_Task, 'duration_secs': 0.552516} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.691327] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e37758cc-7287-4271-ad47-d711201d0add] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 981.691327] env[65503]: WARNING neutronclient.v2_0.client [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 981.691327] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.691327] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.691327] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 981.691327] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea72f807-c93c-4690-8634-d5c87584cff0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.702652] env[65503]: WARNING openstack [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 981.706026] env[65503]: WARNING openstack [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 981.711714] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 
tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 981.711714] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52abd611-bae5-904b-09d5-fb34b0d56bb1" [ 981.711714] env[65503]: _type = "Task" [ 981.711714] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.713322] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-e4ff15a9-ecda-42a3-8724-0d6cab24c894" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.713581] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-e4ff15a9-ecda-42a3-8724-0d6cab24c894" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.715929] env[65503]: DEBUG nova.objects.instance [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'flavor' on Instance uuid 2a1587cd-8b47-439f-948c-d58a5dc8220e {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.739199] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52abd611-bae5-904b-09d5-fb34b0d56bb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.739199] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522b94b4-588c-ef34-f3cd-a828feb0c082, 'name': SearchDatastore_Task, 'duration_secs': 0.013951} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.749045] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd4654cd-49a1-45f9-a701-efa9e9ae4e1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.752211] env[65503]: DEBUG oslo_vmware.api [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450416, 'name': PowerOnVM_Task, 'duration_secs': 0.621547} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.752211] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 981.757469] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for the task: (returnval){ [ 981.757469] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527cdfe6-9aca-7af4-c116-0d04fe4020ee" [ 981.757469] env[65503]: _type = "Task" [ 981.757469] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.768191] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527cdfe6-9aca-7af4-c116-0d04fe4020ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.781941] env[65503]: WARNING neutronclient.v2_0.client [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
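The PowerOnVM_Task / SearchDatastore_Task entries above all follow the same pattern: a task is submitted, then polled ("progress is N%") until it is reported as completed successfully. The sketch below is an illustrative polling loop under that assumption; it is not the oslo.vmware implementation, and get_task_info is a hypothetical callable standing in for the session's task lookup.

import time

def wait_for_task(get_task_info, task_id, interval=0.5):
    # get_task_info(task_id) is assumed to return a dict such as
    # {'state': 'running'|'success'|'error', 'progress': int, 'error': str|None}.
    while True:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            return info                     # "... completed successfully."
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        # Corresponds to the DEBUG "Task: {...} progress is N%" lines while waiting.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(interval)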
[ 981.782656] env[65503]: WARNING openstack [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 981.786019] env[65503]: WARNING openstack [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 981.909998] env[65503]: DEBUG nova.compute.manager [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 981.910938] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0200f21f-98b6-4e1c-9234-cde2049949ab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.916402] env[65503]: DEBUG nova.network.neutron [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Updating instance_info_cache with network_info: [{"id": "6c742bed-a172-46d4-9ac0-cc3025ddff75", "address": "fa:16:3e:52:86:75", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c742bed-a1", "ovs_interfaceid": "6c742bed-a172-46d4-9ac0-cc3025ddff75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 981.931026] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.994736] env[65503]: DEBUG oslo_concurrency.lockutils [req-38ee3e92-2f87-4536-bfab-3603459dc3b0 req-8c691423-4339-4938-a5bf-435cb9fb0020 service nova] Releasing lock "refresh_cache-a043a8e2-8661-4d80-939d-8e7b02b0459f" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.028289] env[65503]: DEBUG oslo_concurrency.lockutils [req-4f0f38db-8e8c-420d-8330-c0337e7813fe req-3dd6d176-cee8-4c06-8fd6-50e3c8110883 service nova] Releasing lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.159315] env[65503]: DEBUG nova.scheduler.client.report [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 982.163245] env[65503]: DEBUG nova.network.neutron [-] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 982.164523] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7ca259e7-653e-487a-8cbe-1cfcc99de7a4 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "82415443-1884-4898-996e-828d23f67f23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.611s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.222076] env[65503]: WARNING neutronclient.v2_0.client [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
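Most of the synchronization visible above ("Acquiring lock ..." / "acquired ... waited 0.000s" / '"released" ... held N.NNNs') is produced by oslo.concurrency's lockutils helpers. A minimal usage sketch, assuming only the public lockutils API; the lock names and function below are illustrative, not Nova's.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # The decorator's wrapper is what emits the "acquired ... waited" and
    # '"released" ... held' DEBUG lines seen throughout this log.
    return f"claimed resources for {instance_uuid}"

# Equivalent inline form; the lock() context manager logs the
# "Acquiring lock" / "Acquired lock" / "Releasing lock" lines.
with lockutils.lock('refresh_cache-<instance-uuid>'):
    pass  # work that must not run concurrently for this instance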
[ 982.222940] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 982.223315] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 982.246438] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52abd611-bae5-904b-09d5-fb34b0d56bb1, 'name': SearchDatastore_Task, 'duration_secs': 0.03273} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.246873] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.247253] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 982.247870] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.272179] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527cdfe6-9aca-7af4-c116-0d04fe4020ee, 'name': SearchDatastore_Task, 'duration_secs': 0.017164} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.276428] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.276781] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] a043a8e2-8661-4d80-939d-8e7b02b0459f/a043a8e2-8661-4d80-939d-8e7b02b0459f.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 982.277709] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.277886] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.278170] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-906d2bd9-b19a-4974-a823-f449d1aee6c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.280528] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01dd2b2c-4b7f-402d-8ffa-055a7ba6c38d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.290281] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for the task: (returnval){ [ 982.290281] env[65503]: value = "task-4450418" [ 982.290281] env[65503]: _type = "Task" [ 982.290281] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.297367] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.297834] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 982.299437] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0d450d7-df54-4a6f-916f-b171ddf8a449 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.308544] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450418, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.312917] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 982.312917] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520838ef-1f4d-8a09-307d-0f676e7a5696" [ 982.312917] env[65503]: _type = "Task" [ 982.312917] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.326949] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520838ef-1f4d-8a09-307d-0f676e7a5696, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.407178] env[65503]: DEBUG nova.objects.instance [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'pci_requests' on Instance uuid 2a1587cd-8b47-439f-948c-d58a5dc8220e {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.419443] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "refresh_cache-ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.419994] env[65503]: DEBUG nova.compute.manager [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Instance network_info: |[{"id": "6c742bed-a172-46d4-9ac0-cc3025ddff75", "address": "fa:16:3e:52:86:75", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c742bed-a1", "ovs_interfaceid": "6c742bed-a172-46d4-9ac0-cc3025ddff75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 982.420862] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:86:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '67921bdb-a7a0-46b5-ba05-ca997496e222', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c742bed-a172-46d4-9ac0-cc3025ddff75', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 982.429212] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 982.433709] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 982.434113] env[65503]: DEBUG oslo_concurrency.lockutils [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "a57486e1-82e3-48d5-99fe-c89b300a2136" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.434338] env[65503]: DEBUG oslo_concurrency.lockutils [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.434537] env[65503]: DEBUG oslo_concurrency.lockutils [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "a57486e1-82e3-48d5-99fe-c89b300a2136-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.434711] env[65503]: DEBUG oslo_concurrency.lockutils [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.434882] env[65503]: DEBUG oslo_concurrency.lockutils [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.437018] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d923e545-4186-418e-bf05-57dfb1af8b7c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.455612] env[65503]: INFO nova.compute.manager [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Terminating instance [ 982.459569] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b7eb746-4601-4b67-93cd-2e4d9075a0ab tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 35.710s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.466325] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.466325] env[65503]: value = "task-4450419" [ 982.466325] env[65503]: _type = "Task" [ 982.466325] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.480869] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450419, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.620833] env[65503]: DEBUG nova.compute.manager [req-010b9973-28e5-46dc-9a8c-ea3227d39323 req-9a2191f1-3032-40f8-8b53-3f3f79f26a69 service nova] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Received event network-vif-deleted-b2f08e99-4a10-4a2d-8aff-83b8e5454187 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 982.621234] env[65503]: DEBUG nova.compute.manager [req-010b9973-28e5-46dc-9a8c-ea3227d39323 req-9a2191f1-3032-40f8-8b53-3f3f79f26a69 service nova] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Received event network-vif-deleted-aa79a652-2e09-4670-8d1d-195369c94b2b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 982.667334] env[65503]: DEBUG oslo_concurrency.lockutils [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.671774] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.743s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.674165] env[65503]: INFO nova.compute.claims [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 982.677546] env[65503]: INFO nova.compute.manager [-] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Took 1.42 seconds to deallocate network for instance. 
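The "Updating instance_info_cache with network_info: [...]" entries above are JSON lists of VIF dicts (port id, MAC, nested subnets with fixed and floating IPs). The helper below is a hypothetical illustration of how such a structure can be summarized; it is not part of Nova.

import json

def summarize_network_info(network_info_json):
    # network_info_json: a JSON string shaped like the cache entries above.
    vifs = json.loads(network_info_json)
    summary = []
    for vif in vifs:
        fixed_ips = [
            ip['address']
            for subnet in vif['network']['subnets']
            for ip in subnet['ips']
            if ip['type'] == 'fixed'
        ]
        summary.append((vif['id'], vif['address'], fixed_ips))
    return summary

# For the ecb5735d-... instance above this would yield, e.g.:
# [('6c742bed-a172-46d4-9ac0-cc3025ddff75', 'fa:16:3e:52:86:75', ['192.168.128.13'])]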
[ 982.679448] env[65503]: DEBUG nova.compute.manager [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] Received event network-vif-plugged-21904121-6624-489f-b851-76b0dfc15641 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 982.680020] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Acquiring lock "e37758cc-7287-4271-ad47-d711201d0add-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.680020] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Lock "e37758cc-7287-4271-ad47-d711201d0add-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.680395] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Lock "e37758cc-7287-4271-ad47-d711201d0add-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.680861] env[65503]: DEBUG nova.compute.manager [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] No waiting events found dispatching network-vif-plugged-21904121-6624-489f-b851-76b0dfc15641 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 982.680861] env[65503]: WARNING nova.compute.manager [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] Received unexpected event network-vif-plugged-21904121-6624-489f-b851-76b0dfc15641 for instance with vm_state building and task_state spawning. [ 982.681074] env[65503]: DEBUG nova.compute.manager [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] Received event network-changed-21904121-6624-489f-b851-76b0dfc15641 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 982.681300] env[65503]: DEBUG nova.compute.manager [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] Refreshing instance network info cache due to event network-changed-21904121-6624-489f-b851-76b0dfc15641. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 982.681536] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Acquiring lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.681731] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Acquired lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.681933] env[65503]: DEBUG nova.network.neutron [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] Refreshing network info cache for port 21904121-6624-489f-b851-76b0dfc15641 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 982.710989] env[65503]: INFO nova.scheduler.client.report [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleted allocations for instance 3b31611f-84f9-4bf6-8e26-f64db06d15ed [ 982.747676] env[65503]: DEBUG oslo_concurrency.lockutils [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquiring lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.748155] env[65503]: DEBUG oslo_concurrency.lockutils [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.748540] env[65503]: DEBUG oslo_concurrency.lockutils [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquiring lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.748759] env[65503]: DEBUG oslo_concurrency.lockutils [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.749031] env[65503]: DEBUG oslo_concurrency.lockutils [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.752952] env[65503]: INFO nova.compute.manager [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Terminating instance [ 982.802833] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450418, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.826464] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520838ef-1f4d-8a09-307d-0f676e7a5696, 'name': SearchDatastore_Task, 'duration_secs': 0.0138} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.827501] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7be169a-f545-4be7-82d5-ee5512da2b1e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.834835] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 982.834835] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52537189-ad4b-aded-62cd-400636384012" [ 982.834835] env[65503]: _type = "Task" [ 982.834835] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.847444] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52537189-ad4b-aded-62cd-400636384012, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.911506] env[65503]: DEBUG nova.objects.base [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Object Instance<2a1587cd-8b47-439f-948c-d58a5dc8220e> lazy-loaded attributes: flavor,pci_requests {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 982.911506] env[65503]: DEBUG nova.network.neutron [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 982.911996] env[65503]: WARNING neutronclient.v2_0.client [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 982.912519] env[65503]: WARNING neutronclient.v2_0.client [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 982.913402] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 982.913875] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 982.963163] env[65503]: DEBUG nova.compute.manager [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 982.963594] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 982.967183] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b5f199-1a4c-432f-889b-1f463f26592f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.979706] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450419, 'name': CreateVM_Task, 'duration_secs': 0.442125} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.981867] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 982.982286] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.982799] env[65503]: WARNING neutronclient.v2_0.client [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 982.983180] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.983330] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.983678] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 982.983917] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de3b1a5f-a74c-4286-831c-14f6028d5bce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.985580] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3052e2f-bc9b-4594-ac30-57c4a9676dcd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.992189] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 982.992189] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52286663-785b-2c59-e559-f9994c67ff32" [ 982.992189] env[65503]: _type = "Task" [ 982.992189] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.999512] env[65503]: DEBUG oslo_vmware.api [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 982.999512] env[65503]: value = "task-4450420" [ 982.999512] env[65503]: _type = "Task" [ 982.999512] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.008297] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52286663-785b-2c59-e559-f9994c67ff32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.014570] env[65503]: DEBUG oslo_vmware.api [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450420, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.031782] env[65503]: DEBUG nova.policy [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b617c5c7508403f9bef0d6b436f541d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be67f50c5bc447309d4c04f3f2805455', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 983.188459] env[65503]: WARNING neutronclient.v2_0.client [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 983.189733] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 983.190603] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 983.201814] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.220913] env[65503]: DEBUG oslo_concurrency.lockutils [None req-13d33121-87b6-4fc8-8f62-5987575c512f tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "3b31611f-84f9-4bf6-8e26-f64db06d15ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.424s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.259123] env[65503]: DEBUG nova.compute.manager [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 983.259424] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.260799] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715c4cfa-ee25-4eb7-a9c8-d65415b5e2c6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.273975] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.274684] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98f11419-77cb-4b94-b454-c2461c392069 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.284938] env[65503]: DEBUG oslo_vmware.api [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for the task: (returnval){ [ 983.284938] env[65503]: value = "task-4450421" [ 983.284938] env[65503]: _type = "Task" [ 983.284938] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.297481] env[65503]: DEBUG oslo_vmware.api [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450421, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.310317] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450418, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.670898} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.310718] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] a043a8e2-8661-4d80-939d-8e7b02b0459f/a043a8e2-8661-4d80-939d-8e7b02b0459f.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 983.311036] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 983.311388] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffc89d35-3a5f-40a0-a0e9-76afe2e8cc75 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.320948] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for the task: (returnval){ [ 983.320948] env[65503]: value = "task-4450422" [ 983.320948] env[65503]: _type = "Task" [ 983.320948] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.331950] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450422, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.348727] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52537189-ad4b-aded-62cd-400636384012, 'name': SearchDatastore_Task, 'duration_secs': 0.057344} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.349142] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.350227] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] e37758cc-7287-4271-ad47-d711201d0add/e37758cc-7287-4271-ad47-d711201d0add.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 983.350227] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e420beb3-3143-42ed-a3a6-aefe2c57f1a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.358908] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 983.358908] env[65503]: value = "task-4450423" [ 983.358908] env[65503]: _type = "Task" [ 983.358908] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.370819] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450423, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.509329] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52286663-785b-2c59-e559-f9994c67ff32, 'name': SearchDatastore_Task, 'duration_secs': 0.016641} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.510100] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.510360] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.510622] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.510806] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.510956] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.511811] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be554d3a-065f-41c2-828f-afc129da566a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.518322] env[65503]: DEBUG oslo_vmware.api [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450420, 'name': PowerOffVM_Task, 'duration_secs': 0.35804} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.519071] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 983.519305] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 983.519625] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b37d0e0-c42c-4587-b0f8-c8afa7334871 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.527384] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.527650] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 983.528699] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99a24e8c-a438-47ea-99d8-71269db13716 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.536097] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 983.536097] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525689d9-65f0-c5fd-b68f-5d0e707b3fe3" [ 983.536097] env[65503]: _type = "Task" [ 983.536097] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.545437] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525689d9-65f0-c5fd-b68f-5d0e707b3fe3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.604535] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 983.604741] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 983.604908] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleting the datastore file [datastore2] a57486e1-82e3-48d5-99fe-c89b300a2136 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 983.605273] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e28bbbea-d4f8-48c9-a15a-259c80dae2f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.613351] env[65503]: DEBUG oslo_vmware.api [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 983.613351] env[65503]: value = "task-4450425" [ 983.613351] env[65503]: _type = "Task" [ 983.613351] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.623607] env[65503]: DEBUG oslo_vmware.api [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450425, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.801180] env[65503]: DEBUG oslo_vmware.api [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450421, 'name': PowerOffVM_Task, 'duration_secs': 0.247063} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.804320] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 983.804517] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 983.805088] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-623eee29-cfd6-4d7b-b255-b3b9fc53243f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.836922] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450422, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093573} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.837681] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 983.838048] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5157339-80d9-4325-a159-31e2c72202cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.865776] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] a043a8e2-8661-4d80-939d-8e7b02b0459f/a043a8e2-8661-4d80-939d-8e7b02b0459f.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.869088] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5daefc6-726a-4e86-9903-334d0185bf07 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.894297] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 983.895679] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Deleting contents 
of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 983.896867] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Deleting the datastore file [datastore2] b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 983.902969] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d832a25f-2899-49e3-8307-a127db5cb38f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.906147] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450423, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.908498] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for the task: (returnval){ [ 983.908498] env[65503]: value = "task-4450427" [ 983.908498] env[65503]: _type = "Task" [ 983.908498] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.914890] env[65503]: DEBUG oslo_vmware.api [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for the task: (returnval){ [ 983.914890] env[65503]: value = "task-4450428" [ 983.914890] env[65503]: _type = "Task" [ 983.914890] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.924300] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450427, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.937295] env[65503]: DEBUG oslo_vmware.api [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450428, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.979744] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca90d52-4ec6-4034-b2e1-8e6764382857 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.990874] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1d958f-6335-4b54-b47f-c3aa9314d555 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.998520] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a2263-d7fa-63f2-76d6-6282b0435403/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 983.999684] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e48347-49e3-45b2-9b3e-6119c13c508a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.008323] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a2263-d7fa-63f2-76d6-6282b0435403/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 984.008527] env[65503]: ERROR oslo_vmware.rw_handles [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a2263-d7fa-63f2-76d6-6282b0435403/disk-0.vmdk due to incomplete transfer. [ 984.037739] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cc931925-f5c5-4fcf-949d-511d335b5935 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.049668] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977ec42a-2bd4-4c4d-bce0-4897bf2837cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.063588] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525689d9-65f0-c5fd-b68f-5d0e707b3fe3, 'name': SearchDatastore_Task, 'duration_secs': 0.017528} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.066210] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a916d16e-be42-4005-95de-bf3bc39276ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.072027] env[65503]: DEBUG oslo_vmware.rw_handles [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a2263-d7fa-63f2-76d6-6282b0435403/disk-0.vmdk. {{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 984.073643] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Uploaded image 0f5177f1-4307-452a-b134-570a3098c1b9 to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 984.075077] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 984.075387] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12610abf-b23c-4005-bb83-2192e19d39c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.078383] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3549b6d7-981b-4d67-a6e5-7c5a95f787d2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.097854] env[65503]: DEBUG nova.compute.provider_tree [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.103109] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 984.103109] env[65503]: value = "task-4450429" [ 984.103109] env[65503]: _type = "Task" [ 984.103109] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.103109] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 984.103109] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52878ac6-0403-9960-2c8b-ff70f66c9fdf" [ 984.103109] env[65503]: _type = "Task" [ 984.103109] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.117089] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450429, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.123968] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52878ac6-0403-9960-2c8b-ff70f66c9fdf, 'name': SearchDatastore_Task, 'duration_secs': 0.02496} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.124703] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.125013] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046/ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 984.125785] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b8d0c7c-4f5b-44ef-a81e-4a62a3440547 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.132147] env[65503]: DEBUG oslo_vmware.api [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450425, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.483821} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.132969] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.133195] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 984.133375] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 984.133693] env[65503]: INFO nova.compute.manager [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Took 1.17 seconds to destroy the instance on the hypervisor. [ 984.133879] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 984.134536] env[65503]: DEBUG nova.compute.manager [-] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 984.134650] env[65503]: DEBUG nova.network.neutron [-] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 984.134928] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 984.135588] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 984.135887] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 984.145165] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 984.145165] env[65503]: value = "task-4450430" [ 984.145165] env[65503]: _type = "Task" [ 984.145165] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.155314] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450430, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.384148] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450423, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.728671} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.385711] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] e37758cc-7287-4271-ad47-d711201d0add/e37758cc-7287-4271-ad47-d711201d0add.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 984.385711] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 984.385711] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed729987-325e-4062-86af-a80aaf928e11 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.396313] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 984.396313] env[65503]: value = "task-4450431" [ 984.396313] env[65503]: _type = "Task" [ 984.396313] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.410381] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450431, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.425399] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450427, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.434375] env[65503]: DEBUG oslo_vmware.api [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Task: {'id': task-4450428, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389492} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.434674] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.434864] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 984.435045] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 984.435244] env[65503]: INFO nova.compute.manager [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Took 1.18 seconds to destroy the instance on the hypervisor. [ 984.435483] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 984.435708] env[65503]: DEBUG nova.compute.manager [-] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 984.435810] env[65503]: DEBUG nova.network.neutron [-] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 984.436090] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 984.436684] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 984.436972] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 984.550067] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 984.550067] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 984.604942] env[65503]: DEBUG nova.scheduler.client.report [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 984.621088] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450429, 'name': Destroy_Task} progress is 33%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.655745] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450430, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.656880] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 984.715020] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 984.780578] env[65503]: DEBUG nova.network.neutron [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Successfully updated port: e4ff15a9-ecda-42a3-8724-0d6cab24c894 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 984.908219] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450431, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079154} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.908219] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 984.908946] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792e9c82-ad77-4c10-8bf1-516d4acace97 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.920854] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450427, 'name': ReconfigVM_Task, 'duration_secs': 0.534041} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.932726] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Reconfigured VM instance instance-00000053 to attach disk [datastore1] a043a8e2-8661-4d80-939d-8e7b02b0459f/a043a8e2-8661-4d80-939d-8e7b02b0459f.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 984.942928] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] e37758cc-7287-4271-ad47-d711201d0add/e37758cc-7287-4271-ad47-d711201d0add.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 984.943348] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bdc9ebc-ab93-4251-bc9b-0815eda58a79 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.945493] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbe310ed-a44e-4460-84f0-4f971bb43283 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.967148] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for the task: (returnval){ [ 984.967148] env[65503]: value = "task-4450432" [ 984.967148] env[65503]: _type = "Task" [ 984.967148] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.968621] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 984.968621] env[65503]: value = "task-4450433" [ 984.968621] env[65503]: _type = "Task" [ 984.968621] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.984123] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450432, 'name': Rename_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.986533] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450433, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.096429] env[65503]: WARNING neutronclient.v2_0.client [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 985.097253] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 985.097608] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 985.116388] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.117057] env[65503]: DEBUG nova.compute.manager [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 985.119928] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.920s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.120302] env[65503]: DEBUG nova.objects.instance [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lazy-loading 'resources' on Instance uuid 81e27e9b-7d7e-4b04-8257-268660fd9ec3 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.127773] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450429, 'name': Destroy_Task, 'duration_secs': 0.640632} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.129050] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Destroyed the VM [ 985.129050] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 985.129050] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-152db5ea-c386-40b6-bb40-a5627edaffe4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.139066] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 985.139066] env[65503]: value = "task-4450434" [ 985.139066] env[65503]: _type = "Task" [ 985.139066] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.152175] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450434, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.165923] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450430, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.77411} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.166904] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046/ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 985.166904] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 985.167097] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd44bebf-f958-4f98-bc58-e79f14eed670 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.175479] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 985.175479] env[65503]: value = "task-4450435" [ 985.175479] env[65503]: _type = "Task" [ 985.175479] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.185537] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450435, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.272125] env[65503]: DEBUG nova.network.neutron [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] Updated VIF entry in instance network info cache for port 21904121-6624-489f-b851-76b0dfc15641. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 985.272707] env[65503]: DEBUG nova.network.neutron [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] Updating instance_info_cache with network_info: [{"id": "21904121-6624-489f-b851-76b0dfc15641", "address": "fa:16:3e:cd:01:68", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21904121-66", "ovs_interfaceid": "21904121-6624-489f-b851-76b0dfc15641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 985.284239] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.284755] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 985.284837] env[65503]: DEBUG nova.network.neutron [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 985.482895] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450433, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.487235] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450432, 'name': Rename_Task, 'duration_secs': 0.351596} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.487235] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.487359] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a2ee6c0-af47-4ade-8ba1-4f54718dfbe2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.495591] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for the task: (returnval){ [ 985.495591] env[65503]: value = "task-4450436" [ 985.495591] env[65503]: _type = "Task" [ 985.495591] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.505619] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450436, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.557197] env[65503]: DEBUG nova.network.neutron [-] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 985.623495] env[65503]: DEBUG nova.compute.utils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 985.629060] env[65503]: DEBUG nova.compute.manager [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 985.629375] env[65503]: DEBUG nova.network.neutron [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 985.629986] env[65503]: WARNING neutronclient.v2_0.client [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 985.629986] env[65503]: WARNING neutronclient.v2_0.client [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
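The blocks above of the form 'Waiting for the task: (returnval){ value = "task-..." _type = "Task" } to complete' followed by repeated 'progress is N%' entries come from oslo.vmware's task polling (wait_for_task / _poll_task). A minimal sketch of that invoke-then-poll pattern, assuming a reachable vCenter; the host, credentials and vm_ref are hypothetical placeholders, and only the commonly used constructor arguments are shown:

    from oslo_vmware import api as vmware_api


    def power_on(session, vm_ref):
        # invoke_api() issues the SOAP call (here PowerOnVM_Task, as in the
        # log) and returns a task moref; wait_for_task() then polls TaskInfo
        # until the task succeeds -- that polling produces the "progress is
        # N%" and "'duration_secs': ..." entries above -- or raises if the
        # task reports an error.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task_ref)


    # Hypothetical session setup; create_session=False avoids connecting at
    # construction time in this sketch.
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5, create_session=False)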
[ 985.630446] env[65503]: WARNING openstack [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 985.631027] env[65503]: WARNING openstack [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 985.650682] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450434, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.685039] env[65503]: DEBUG nova.policy [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e97b5208de384c19bbc0e332b67fc4ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c5b0c3771b5434992cd58e1af539bde', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 985.688982] env[65503]: DEBUG nova.network.neutron [-] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 985.696974] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450435, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072196} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.696974] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 985.698118] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4bfc60-3829-428f-9a74-2958ffdf8f46 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.728382] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046/ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 985.732382] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3abedd77-b1e0-4aeb-9e57-2c2eca4f797b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.758204] env[65503]: DEBUG nova.compute.manager [req-36eb9190-1bc4-433b-b61e-6acb21c91798 req-cf4b1b1e-9fbc-4e8a-80d0-5419ca92a7d6 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received event network-vif-plugged-e4ff15a9-ecda-42a3-8724-0d6cab24c894 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 985.758636] env[65503]: DEBUG oslo_concurrency.lockutils [req-36eb9190-1bc4-433b-b61e-6acb21c91798 req-cf4b1b1e-9fbc-4e8a-80d0-5419ca92a7d6 service nova] Acquiring lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.758636] env[65503]: DEBUG oslo_concurrency.lockutils [req-36eb9190-1bc4-433b-b61e-6acb21c91798 req-cf4b1b1e-9fbc-4e8a-80d0-5419ca92a7d6 service nova] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.758749] env[65503]: DEBUG oslo_concurrency.lockutils [req-36eb9190-1bc4-433b-b61e-6acb21c91798 req-cf4b1b1e-9fbc-4e8a-80d0-5419ca92a7d6 service nova] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.758889] env[65503]: DEBUG nova.compute.manager [req-36eb9190-1bc4-433b-b61e-6acb21c91798 req-cf4b1b1e-9fbc-4e8a-80d0-5419ca92a7d6 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] No waiting events found dispatching network-vif-plugged-e4ff15a9-ecda-42a3-8724-0d6cab24c894 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 985.759052] env[65503]: WARNING nova.compute.manager 
[req-36eb9190-1bc4-433b-b61e-6acb21c91798 req-cf4b1b1e-9fbc-4e8a-80d0-5419ca92a7d6 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received unexpected event network-vif-plugged-e4ff15a9-ecda-42a3-8724-0d6cab24c894 for instance with vm_state active and task_state None. [ 985.768670] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 985.768670] env[65503]: value = "task-4450437" [ 985.768670] env[65503]: _type = "Task" [ 985.768670] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.779038] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Releasing lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 985.779342] env[65503]: DEBUG nova.compute.manager [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Received event network-vif-plugged-6c742bed-a172-46d4-9ac0-cc3025ddff75 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 985.779632] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Acquiring lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.779931] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.780590] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.780987] env[65503]: DEBUG nova.compute.manager [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] No waiting events found dispatching network-vif-plugged-6c742bed-a172-46d4-9ac0-cc3025ddff75 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 985.781729] env[65503]: WARNING nova.compute.manager [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Received unexpected event network-vif-plugged-6c742bed-a172-46d4-9ac0-cc3025ddff75 for instance with vm_state building and task_state spawning. 
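The 'Acquiring lock ... by ...', 'acquired ... :: waited 0.000s' and '"released" ... :: held 0.000s' lines above (for "compute_resources" and the per-instance "<uuid>-events" locks) are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the two forms in use; the lock names and function bodies are illustrative only:

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs only while the named in-process lock is held; the wrapper logs
        # the acquire / waited / held timings seen in the log above.
        pass


    def pop_instance_event(instance_uuid):
        # Context-manager form, as used for the per-instance event locks
        # ("<uuid>-events") while dispatching network-vif-plugged events.
        with lockutils.lock('%s-events' % instance_uuid):
            pass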
[ 985.781885] env[65503]: DEBUG nova.compute.manager [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Received event network-changed-6c742bed-a172-46d4-9ac0-cc3025ddff75 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 985.782052] env[65503]: DEBUG nova.compute.manager [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Refreshing instance network info cache due to event network-changed-6c742bed-a172-46d4-9ac0-cc3025ddff75. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 985.782318] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Acquiring lock "refresh_cache-ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.783058] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Acquired lock "refresh_cache-ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 985.783058] env[65503]: DEBUG nova.network.neutron [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Refreshing network info cache for port 6c742bed-a172-46d4-9ac0-cc3025ddff75 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 985.789395] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 985.789395] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 985.803833] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450437, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.891734] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835552bc-0ef0-4861-b5e0-dc9eb66ed68b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.900918] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f30610-c02d-484e-9162-24d00a2287ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.932794] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1b07ae-9148-40e9-9376-03a708c100d1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.941962] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a347a53c-7e49-478d-8362-9b34141d0849 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.960406] env[65503]: DEBUG nova.compute.provider_tree [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.986636] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450433, 'name': ReconfigVM_Task, 'duration_secs': 0.947191} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.987057] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Reconfigured VM instance instance-00000054 to attach disk [datastore1] e37758cc-7287-4271-ad47-d711201d0add/e37758cc-7287-4271-ad47-d711201d0add.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 985.987938] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d64dc5b-138a-4227-98ce-83b397d12215 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.997519] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 985.997519] env[65503]: value = "task-4450438" [ 985.997519] env[65503]: _type = "Task" [ 985.997519] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.014836] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450436, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.019121] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450438, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.020403] env[65503]: DEBUG nova.network.neutron [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Successfully created port: 4d06e429-4f16-43d7-a443-99c8a67b3e4d {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 986.059734] env[65503]: INFO nova.compute.manager [-] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Took 1.62 seconds to deallocate network for instance. [ 986.113270] env[65503]: WARNING nova.network.neutron [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] d2c28c8e-55de-416b-97e1-c5ea06e7f107 already exists in list: networks containing: ['d2c28c8e-55de-416b-97e1-c5ea06e7f107']. ignoring it [ 986.113503] env[65503]: WARNING nova.network.neutron [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] d2c28c8e-55de-416b-97e1-c5ea06e7f107 already exists in list: networks containing: ['d2c28c8e-55de-416b-97e1-c5ea06e7f107']. ignoring it [ 986.129767] env[65503]: DEBUG nova.compute.manager [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 986.150400] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450434, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.193714] env[65503]: INFO nova.compute.manager [-] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Took 2.06 seconds to deallocate network for instance. [ 986.284925] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450437, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.286734] env[65503]: WARNING neutronclient.v2_0.client [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
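The 'Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: ...' entries above carry the resource-provider inventory the scheduler report client syncs to Placement; usable capacity per resource class is (total - reserved) * allocation_ratio. A small worked check over the exact figures from the log (this is Placement's standard capacity formula, not code taken from Nova):

    # Inventory figures copied from the log for provider
    # 988ff85a-1d12-41bb-a369-e298e8491ca1.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        # VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 200.0
        print(rc, usable)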
[ 986.287542] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 986.290820] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 986.465438] env[65503]: DEBUG nova.scheduler.client.report [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 986.510619] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450436, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.517105] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450438, 'name': Rename_Task, 'duration_secs': 0.36345} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.517406] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 986.517681] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c29b9d14-887d-4f11-b70f-39dedeb4117f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.525754] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 986.525754] env[65503]: value = "task-4450439" [ 986.525754] env[65503]: _type = "Task" [ 986.525754] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.535942] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450439, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.567495] env[65503]: DEBUG oslo_concurrency.lockutils [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.650927] env[65503]: DEBUG oslo_vmware.api [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450434, 'name': RemoveSnapshot_Task, 'duration_secs': 1.358052} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.651550] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 986.651657] env[65503]: INFO nova.compute.manager [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Took 16.33 seconds to snapshot the instance on the hypervisor. 
[ 986.680324] env[65503]: DEBUG nova.compute.manager [req-b70ea29d-69b8-4bcf-8992-65107d996970 req-5317eda6-04f1-45ca-8dad-fd6f2bedb85f service nova] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Received event network-vif-deleted-b4d33353-d9f7-496e-9fb4-be39d02d940f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 986.686833] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 986.687683] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 986.704714] env[65503]: DEBUG oslo_concurrency.lockutils [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.783300] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450437, 'name': ReconfigVM_Task, 'duration_secs': 0.788079} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.783628] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Reconfigured VM instance instance-00000055 to attach disk [datastore1] ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046/ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 986.784321] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7161f8a4-8c89-4bbb-af43-f33d65d50231 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.791457] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 986.791457] env[65503]: value = "task-4450440" [ 986.791457] env[65503]: _type = "Task" [ 986.791457] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.802860] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450440, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.975372] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.855s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.978050] env[65503]: DEBUG oslo_concurrency.lockutils [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.411s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.978322] env[65503]: DEBUG nova.objects.instance [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Lazy-loading 'resources' on Instance uuid b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.002458] env[65503]: INFO nova.scheduler.client.report [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted allocations for instance 81e27e9b-7d7e-4b04-8257-268660fd9ec3 [ 987.011019] env[65503]: DEBUG oslo_vmware.api [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450436, 'name': PowerOnVM_Task, 'duration_secs': 1.056548} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.011019] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 987.011019] env[65503]: INFO nova.compute.manager [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Took 11.80 seconds to spawn the instance on the hypervisor. 
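The 'Reconfiguring VM instance ... to attach disk [datastore1] .../....vmdk' / 'Reconfigured VM instance ...' pairs above correspond to a VirtualMachine.ReconfigVM_Task whose config spec adds a VirtualDisk device. A rough sketch of that spec-building pattern via the suds factory exposed by the oslo.vmware session; the moref, datastore path, controller key and unit number are placeholders, and a plain flat backing is used here rather than the sparse backing the log mentions:

    def attach_vmdk(session, vm_ref, vmdk_path, controller_key, unit_number):
        cf = session.vim.client.factory

        backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
        backing.fileName = vmdk_path      # e.g. '[datastore1] inst/inst.vmdk'
        backing.diskMode = 'persistent'

        disk = cf.create('ns0:VirtualDisk')
        disk.backing = backing
        disk.controllerKey = controller_key
        disk.unitNumber = unit_number
        disk.key = -100                   # negative key => new device

        dev_change = cf.create('ns0:VirtualDeviceConfigSpec')
        dev_change.operation = 'add'
        dev_change.device = disk

        spec = cf.create('ns0:VirtualMachineConfigSpec')
        spec.deviceChange = [dev_change]

        # Same invoke-then-poll pattern as the other tasks in the log.
        task_ref = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                      spec=spec)
        session.wait_for_task(task_ref)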
[ 987.011194] env[65503]: DEBUG nova.compute.manager [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 987.011968] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcaf5c9-6fb1-402e-aff2-9eabf1e3c9c6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.037230] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450439, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.140631] env[65503]: DEBUG nova.compute.manager [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 987.174522] env[65503]: DEBUG nova.virt.hardware [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 987.174919] env[65503]: DEBUG nova.virt.hardware [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 987.174919] env[65503]: DEBUG nova.virt.hardware [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 987.175074] env[65503]: DEBUG nova.virt.hardware [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 987.175228] env[65503]: DEBUG nova.virt.hardware [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:400}} [ 987.175370] env[65503]: DEBUG nova.virt.hardware [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 987.175577] env[65503]: DEBUG nova.virt.hardware [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 987.175727] env[65503]: DEBUG nova.virt.hardware [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 987.175904] env[65503]: DEBUG nova.virt.hardware [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 987.176144] env[65503]: DEBUG nova.virt.hardware [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 987.176246] env[65503]: DEBUG nova.virt.hardware [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 987.177258] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a44471c-9934-4349-863c-50397a71e908 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.192628] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e318834e-8815-40a9-9be8-31c9833c38f3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.216452] env[65503]: DEBUG nova.compute.manager [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Found 3 images (rotation: 2) {{(pid=65503) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5057}} [ 987.216652] env[65503]: DEBUG nova.compute.manager [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Rotating out 1 backups {{(pid=65503) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5065}} [ 987.216804] env[65503]: DEBUG nova.compute.manager [None req-ba1ace19-258d-418c-8408-939aa7c6ee43 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 
2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Deleting image b6cceed3-7848-4311-8d40-eea9eb611582 {{(pid=65503) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5070}} [ 987.305561] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450440, 'name': Rename_Task, 'duration_secs': 0.192735} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.305990] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 987.306352] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-875523f7-b997-4f1d-a3bb-b1acac52a7e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.315086] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 987.315086] env[65503]: value = "task-4450441" [ 987.315086] env[65503]: _type = "Task" [ 987.315086] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.328121] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450441, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.364920] env[65503]: WARNING neutronclient.v2_0.client [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
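The nova.virt.hardware walkthrough a few entries above (flavor and image limits 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", exactly one possible topology) is the CPU-topology negotiation Nova performs before building the VM. As a rough illustration of the enumeration step only, and not Nova's actual implementation, the candidates are the (sockets, cores, threads) factorizations of the vCPU count under the per-dimension maxima seen in the log (65536 each); the names below are hypothetical.

```python
# Illustrative sketch (not nova.virt.hardware itself): enumerate the
# (sockets, cores, threads) triples whose product equals the vCPU count,
# capped by the per-dimension limits logged above (65536 each).
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

# For the m1.nano flavor (vcpus=1) there is exactly one candidate, the
# 1-socket/1-core/1-thread topology reported in the log.
print(possible_topologies(1))
```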
[ 987.365683] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 987.366134] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 987.411018] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 987.411018] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 987.519751] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dd18259a-1b25-4166-b1d2-4bc6f2282109 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "81e27e9b-7d7e-4b04-8257-268660fd9ec3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.454s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.540222] env[65503]: INFO nova.compute.manager [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Took 17.99 seconds to build instance. [ 987.548850] env[65503]: DEBUG oslo_vmware.api [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450439, 'name': PowerOnVM_Task, 'duration_secs': 0.813613} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.549123] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 987.549313] env[65503]: INFO nova.compute.manager [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Took 9.85 seconds to spawn the instance on the hypervisor. [ 987.549572] env[65503]: DEBUG nova.compute.manager [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 987.550426] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a0cd17-70cc-4825-974e-36470b0a7d19 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.697668] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362cc264-3718-4df5-824d-622c806c4494 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.707064] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c89677a-fc63-4a3f-a804-d3023210eba9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.714367] env[65503]: DEBUG nova.network.neutron [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Successfully updated port: 4d06e429-4f16-43d7-a443-99c8a67b3e4d {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 987.747121] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60aa023-b36c-4e95-8efa-1347eba31e5d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.761347] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c198f3-6450-4424-9389-739230fc234e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.778366] env[65503]: DEBUG nova.compute.provider_tree [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.825842] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450441, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.893013] env[65503]: WARNING neutronclient.v2_0.client [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 987.893893] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 987.894325] env[65503]: WARNING openstack [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 988.016717] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 988.017369] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 988.046172] env[65503]: DEBUG oslo_concurrency.lockutils [None req-642e62d9-5601-42ad-bc23-e1a34b557d59 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Lock "a043a8e2-8661-4d80-939d-8e7b02b0459f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.511s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.068078] env[65503]: DEBUG nova.network.neutron [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Updated VIF entry in instance network info cache for port 6c742bed-a172-46d4-9ac0-cc3025ddff75. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 988.068438] env[65503]: DEBUG nova.network.neutron [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Updating instance_info_cache with network_info: [{"id": "6c742bed-a172-46d4-9ac0-cc3025ddff75", "address": "fa:16:3e:52:86:75", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c742bed-a1", "ovs_interfaceid": "6c742bed-a172-46d4-9ac0-cc3025ddff75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 988.079541] env[65503]: INFO nova.compute.manager [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Took 17.52 seconds to build instance. [ 988.230266] env[65503]: WARNING neutronclient.v2_0.client [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
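The instance_info_cache entry just logged is the serialized network_info Nova keeps per instance: one element per VIF carrying the Neutron port id, MAC, subnet and fixed-IP data, and the OVS/NSX binding details. As a sketch over that structure only (a hypothetical helper, not a Nova API), the useful fields can be pulled out like this:

```python
# Sketch: walk a network_info list shaped like the cache entry logged above
# and collect (port id, devname, fixed IPs) per VIF. Hypothetical helper
# operating on the plain dict/list structure shown in the log.
def summarize_network_info(network_info):
    summary = []
    for vif in network_info:
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip["type"] == "fixed"
        ]
        summary.append({
            "port_id": vif["id"],
            "devname": vif["devname"],
            "fixed_ips": fixed_ips,
        })
    return summary

# For the cached entry of instance ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046 this
# yields a single VIF: port 6c742bed-a172-46d4-9ac0-cc3025ddff75, device
# tap6c742bed-a1, fixed IP 192.168.128.13.
```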
[ 988.230943] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 988.232390] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 988.243043] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "refresh_cache-310ee0b5-07ee-4cf0-b262-5e8b473efa3d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.243043] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "refresh_cache-310ee0b5-07ee-4cf0-b262-5e8b473efa3d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.246858] env[65503]: DEBUG nova.network.neutron [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 988.281954] env[65503]: DEBUG nova.scheduler.client.report [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 988.330081] env[65503]: DEBUG oslo_vmware.api [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450441, 'name': PowerOnVM_Task, 'duration_secs': 0.796294} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.330081] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 988.330081] env[65503]: INFO nova.compute.manager [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Took 8.20 seconds to spawn the instance on the hypervisor. [ 988.330379] env[65503]: DEBUG nova.compute.manager [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 988.331192] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86046f7-4ffa-4893-86d1-5ba664558718 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.516373] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 988.516781] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 988.574985] env[65503]: DEBUG oslo_concurrency.lockutils [req-9ab3c32c-f1e4-48ad-82b2-0d94c92362f1 req-02c9b760-ed30-4a3a-b37c-59243d989a7d service nova] Releasing lock "refresh_cache-ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.582875] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c9c41a44-c70d-4aaa-9200-71b64d83ed42 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "e37758cc-7287-4271-ad47-d711201d0add" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.039s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.590604] env[65503]: WARNING neutronclient.v2_0.client [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
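The 'Lock "..." acquired by ... waited N s' and '"released" by ... held N s' lines (for example the build lock above held for 19.039s) come from oslo.concurrency's lockutils, which times callers as they enter and leave a named critical section. A minimal sketch of that pattern, assuming only the public lockutils API and a made-up lock name:

```python
# Minimal sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock" / "acquired ... waited" / "released ... held" DEBUG
# lines in this log. The lock name and functions are hypothetical.
from oslo_concurrency import lockutils

@lockutils.synchronized("demo-instance-uuid", external=False)
def build_and_run_instance():
    # Only one caller per lock name runs this body at a time; lockutils
    # logs how long each caller waited and how long it held the lock.
    pass

def terminate_instance():
    # Equivalent context-manager form of the same named lock.
    with lockutils.lock("demo-instance-uuid"):
        pass
```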
[ 988.591292] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 988.591671] env[65503]: WARNING openstack [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 988.677966] env[65503]: DEBUG nova.network.neutron [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updating instance_info_cache with network_info: [{"id": "a52973b6-14a8-480e-8ad4-92719252801c", "address": "fa:16:3e:73:61:b2", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa52973b6-14", "ovs_interfaceid": "a52973b6-14a8-480e-8ad4-92719252801c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "995d778e-470e-4cfa-bb9e-b83d9780f51a", "address": "fa:16:3e:15:00:96", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995d778e-47", "ovs_interfaceid": "995d778e-470e-4cfa-bb9e-b83d9780f51a", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e4ff15a9-ecda-42a3-8724-0d6cab24c894", "address": "fa:16:3e:d4:2a:47", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4ff15a9-ec", "ovs_interfaceid": "e4ff15a9-ecda-42a3-8724-0d6cab24c894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 988.757819] env[65503]: WARNING openstack [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 988.758459] env[65503]: WARNING openstack [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 988.790803] env[65503]: DEBUG oslo_concurrency.lockutils [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.813s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.793261] env[65503]: DEBUG oslo_concurrency.lockutils [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.089s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.793489] env[65503]: DEBUG nova.objects.instance [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lazy-loading 'resources' on Instance uuid a57486e1-82e3-48d5-99fe-c89b300a2136 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.811958] 
env[65503]: INFO nova.scheduler.client.report [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Deleted allocations for instance b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14 [ 988.852709] env[65503]: INFO nova.compute.manager [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Took 16.83 seconds to build instance. [ 989.045091] env[65503]: DEBUG nova.network.neutron [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 989.123584] env[65503]: DEBUG nova.compute.manager [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Stashing vm_state: active {{(pid=65503) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 989.181764] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.181988] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.182164] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.183391] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d995b825-082b-4bcc-9242-6c40d480796c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.204605] env[65503]: DEBUG nova.virt.hardware [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 989.204883] 
env[65503]: DEBUG nova.virt.hardware [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 989.205048] env[65503]: DEBUG nova.virt.hardware [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 989.205235] env[65503]: DEBUG nova.virt.hardware [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 989.205428] env[65503]: DEBUG nova.virt.hardware [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 989.205514] env[65503]: DEBUG nova.virt.hardware [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 989.205719] env[65503]: DEBUG nova.virt.hardware [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 989.207075] env[65503]: DEBUG nova.virt.hardware [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 989.207075] env[65503]: DEBUG nova.virt.hardware [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 989.207075] env[65503]: DEBUG nova.virt.hardware [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 989.207075] env[65503]: DEBUG nova.virt.hardware [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 989.213683] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 
tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Reconfiguring VM to attach interface {{(pid=65503) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 989.214829] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9dac417-2f4d-4d72-bdc8-5c4336818442 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.233521] env[65503]: DEBUG oslo_vmware.api [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 989.233521] env[65503]: value = "task-4450442" [ 989.233521] env[65503]: _type = "Task" [ 989.233521] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.253153] env[65503]: DEBUG oslo_vmware.api [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450442, 'name': ReconfigVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.275511] env[65503]: WARNING openstack [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 989.275966] env[65503]: WARNING openstack [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 989.321549] env[65503]: DEBUG oslo_concurrency.lockutils [None req-58f01f0b-c4fe-4812-b33c-44777d59b98e tempest-ServerAddressesTestJSON-495089618 tempest-ServerAddressesTestJSON-495089618-project-member] Lock "b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.573s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.355509] env[65503]: DEBUG oslo_concurrency.lockutils [None req-47a33fde-9a81-4379-af9a-fb399906f922 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.363s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.371665] env[65503]: DEBUG nova.compute.manager [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received event network-changed-e4ff15a9-ecda-42a3-8724-0d6cab24c894 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 
989.371991] env[65503]: DEBUG nova.compute.manager [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Refreshing instance network info cache due to event network-changed-e4ff15a9-ecda-42a3-8724-0d6cab24c894. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 989.372263] env[65503]: DEBUG oslo_concurrency.lockutils [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] Acquiring lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.372374] env[65503]: DEBUG oslo_concurrency.lockutils [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] Acquired lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.372610] env[65503]: DEBUG nova.network.neutron [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Refreshing network info cache for port e4ff15a9-ecda-42a3-8724-0d6cab24c894 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 989.523434] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201d54d4-0bdb-4f83-9ed1-9cb4970e6c5a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.532985] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10979023-e3b3-4a3b-9d32-d9657df2e053 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.566417] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5376aa5d-c13b-46d5-913b-dfc72dc14e3d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.575642] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec834d5c-ec2c-4848-8982-733d7fabd98d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.590966] env[65503]: DEBUG nova.compute.provider_tree [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.647591] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.702449] env[65503]: WARNING neutronclient.v2_0.client [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of 
OpenstackSDK, please use that as this will be removed in a future release. [ 989.702449] env[65503]: WARNING openstack [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 989.702449] env[65503]: WARNING openstack [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 989.746529] env[65503]: DEBUG oslo_vmware.api [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450442, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.822616] env[65503]: DEBUG nova.network.neutron [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Updating instance_info_cache with network_info: [{"id": "4d06e429-4f16-43d7-a443-99c8a67b3e4d", "address": "fa:16:3e:cd:a5:b0", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d06e429-4f", "ovs_interfaceid": "4d06e429-4f16-43d7-a443-99c8a67b3e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 989.879836] env[65503]: WARNING neutronclient.v2_0.client [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 989.881243] env[65503]: WARNING openstack [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 989.881608] env[65503]: WARNING openstack [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 990.094535] env[65503]: DEBUG nova.scheduler.client.report [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 990.248177] env[65503]: DEBUG oslo_vmware.api [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450442, 'name': ReconfigVM_Task, 'duration_secs': 0.839004} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.248932] env[65503]: WARNING neutronclient.v2_0.client [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
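The inventory record reported for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 just above is what the resource tracker pushes to Placement; for each resource class the schedulable capacity is (total - reserved) multiplied by allocation_ratio, consumed in step_size units with at most max_unit per single allocation. A quick worked check on the logged numbers (an illustration, not Placement code):

```python
# Worked example over the inventory dict logged for the provider:
# effective capacity = (total - reserved) * allocation_ratio.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 96},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} schedulable (max {inv['max_unit']} per allocation)")

# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200
```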
[ 990.249075] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.249562] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Reconfigured VM to attach interface {{(pid=65503) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 990.322443] env[65503]: WARNING openstack [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 990.322926] env[65503]: WARNING openstack [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 990.331287] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "refresh_cache-310ee0b5-07ee-4cf0-b262-5e8b473efa3d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.331595] env[65503]: DEBUG nova.compute.manager [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Instance network_info: |[{"id": "4d06e429-4f16-43d7-a443-99c8a67b3e4d", "address": "fa:16:3e:cd:a5:b0", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d06e429-4f", "ovs_interfaceid": "4d06e429-4f16-43d7-a443-99c8a67b3e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 990.332283] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None 
req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:a5:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d06e429-4f16-43d7-a443-99c8a67b3e4d', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 990.340592] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 990.340939] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 990.341252] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-763aad5f-5d71-41f8-b3c9-e869834b6951 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.364654] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 990.364654] env[65503]: value = "task-4450443" [ 990.364654] env[65503]: _type = "Task" [ 990.364654] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.374222] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450443, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.550802] env[65503]: WARNING neutronclient.v2_0.client [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
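The "Waiting for the task: ... CreateVM_Task" / "progress is N%" / "completed successfully" sequences throughout this log are oslo.vmware's task polling. Roughly, and as a sketch against the public oslo.vmware API rather than Nova's exact call sites, the driver issues the vSphere call through the session and then blocks in wait_for_task, which polls the task until it succeeds or raises:

```python
# Sketch of the task-polling pattern behind the PowerOnVM_Task /
# CreateVM_Task entries, assuming the public oslo.vmware API. The
# function and argument names are placeholders, not Nova code.
from oslo_vmware import api


def power_on(session: api.VMwareAPISession, vm_ref):
    """Invoke PowerOnVM_Task on a VM managed-object ref and wait for it."""
    # invoke_api issues the SOAP request ("Invoking
    # VirtualMachine.PowerOnVM_Task" in the log) and returns a task ref.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # wait_for_task polls the task (the "progress is N%" lines) until it
    # completes successfully and returns the task info, or raises on failure.
    return session.wait_for_task(task)
```

A session would be constructed once with api.VMwareAPISession(host, username, password, api_retry_count, task_poll_interval) and shared across such calls.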
[ 990.551536] env[65503]: WARNING openstack [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 990.551988] env[65503]: WARNING openstack [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 990.602926] env[65503]: DEBUG oslo_concurrency.lockutils [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.809s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.606586] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.959s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.640888] env[65503]: INFO nova.scheduler.client.report [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleted allocations for instance a57486e1-82e3-48d5-99fe-c89b300a2136 [ 990.670824] env[65503]: DEBUG nova.network.neutron [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updated VIF entry in instance network info cache for port e4ff15a9-ecda-42a3-8724-0d6cab24c894. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 990.671444] env[65503]: DEBUG nova.network.neutron [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updating instance_info_cache with network_info: [{"id": "a52973b6-14a8-480e-8ad4-92719252801c", "address": "fa:16:3e:73:61:b2", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa52973b6-14", "ovs_interfaceid": "a52973b6-14a8-480e-8ad4-92719252801c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "995d778e-470e-4cfa-bb9e-b83d9780f51a", "address": "fa:16:3e:15:00:96", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995d778e-47", "ovs_interfaceid": "995d778e-470e-4cfa-bb9e-b83d9780f51a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e4ff15a9-ecda-42a3-8724-0d6cab24c894", "address": "fa:16:3e:d4:2a:47", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4ff15a9-ec", "ovs_interfaceid": "e4ff15a9-ecda-42a3-8724-0d6cab24c894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 990.756111] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c99abe64-f32e-4001-9161-6f32d9b506e8 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-e4ff15a9-ecda-42a3-8724-0d6cab24c894" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 9.042s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.765154] env[65503]: DEBUG nova.compute.manager [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Received event network-vif-plugged-4d06e429-4f16-43d7-a443-99c8a67b3e4d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 990.765378] env[65503]: DEBUG oslo_concurrency.lockutils [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Acquiring lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.765963] env[65503]: DEBUG oslo_concurrency.lockutils [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.767074] env[65503]: DEBUG oslo_concurrency.lockutils [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.767328] env[65503]: DEBUG nova.compute.manager [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] No waiting events found dispatching network-vif-plugged-4d06e429-4f16-43d7-a443-99c8a67b3e4d {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 990.767556] env[65503]: WARNING nova.compute.manager [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Received unexpected event network-vif-plugged-4d06e429-4f16-43d7-a443-99c8a67b3e4d for instance with vm_state building and task_state spawning. 
[ 990.768038] env[65503]: DEBUG nova.compute.manager [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Received event network-changed-4d06e429-4f16-43d7-a443-99c8a67b3e4d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 990.768259] env[65503]: DEBUG nova.compute.manager [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Refreshing instance network info cache due to event network-changed-4d06e429-4f16-43d7-a443-99c8a67b3e4d. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 990.769099] env[65503]: DEBUG oslo_concurrency.lockutils [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Acquiring lock "refresh_cache-310ee0b5-07ee-4cf0-b262-5e8b473efa3d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.769372] env[65503]: DEBUG oslo_concurrency.lockutils [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Acquired lock "refresh_cache-310ee0b5-07ee-4cf0-b262-5e8b473efa3d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.769587] env[65503]: DEBUG nova.network.neutron [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Refreshing network info cache for port 4d06e429-4f16-43d7-a443-99c8a67b3e4d {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 990.882042] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450443, 'name': CreateVM_Task, 'duration_secs': 0.374885} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.883250] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.883811] env[65503]: WARNING neutronclient.v2_0.client [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 990.884519] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.884519] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.884785] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 990.885467] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcc952ef-6db0-4452-a135-97db264c93b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.893334] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 990.893334] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520ce951-c7ca-d728-b365-f53fa8276701" [ 990.893334] env[65503]: _type = "Task" [ 990.893334] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.904300] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520ce951-c7ca-d728-b365-f53fa8276701, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.112538] env[65503]: INFO nova.compute.claims [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 991.151540] env[65503]: DEBUG oslo_concurrency.lockutils [None req-267fa712-8381-4b94-b07e-df6ece67d5fd tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "a57486e1-82e3-48d5-99fe-c89b300a2136" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.717s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.176861] env[65503]: DEBUG oslo_concurrency.lockutils [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] Releasing lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.179366] env[65503]: DEBUG nova.compute.manager [req-d07d1504-f29a-4fa3-98be-0ac3fb935aa3 req-596674b0-a9a7-4a8b-b9a2-eb05f65a2e32 service nova] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Received event network-vif-deleted-6b06aa59-3060-4bf3-8f9f-e8942a00fe50 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 991.220214] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.220528] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.282556] env[65503]: WARNING neutronclient.v2_0.client [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
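Editor's note on the lock bookkeeping above: the repeated "Acquiring lock ... by ..." / "acquired ... waited Ns" / ""released" ... held Ns" DEBUG lines are emitted by oslo.concurrency's lockutils wrapper around the named callable. A minimal sketch of that pattern (the function body and return value here are made up for illustration; only the decorator usage and the lock name are taken from the log):

    # Illustrative only: the synchronized decorator is what produces the
    # "Acquiring lock ... by ..." / "acquired ... waited Ns" /
    # ""released" ... held Ns" lockutils DEBUG lines seen throughout this log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Critical section runs with the named in-process lock held; the
        # waited/held durations in the log are measured around this call.
        return 'done'

    print(update_usage())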
[ 991.282556] env[65503]: WARNING openstack [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 991.282556] env[65503]: WARNING openstack [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 991.374027] env[65503]: INFO nova.compute.manager [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Rescuing [ 991.374436] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "refresh_cache-ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.374823] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "refresh_cache-ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.375088] env[65503]: DEBUG nova.network.neutron [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 991.413090] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520ce951-c7ca-d728-b365-f53fa8276701, 'name': SearchDatastore_Task, 'duration_secs': 0.017347} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.413090] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.413522] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 991.413631] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.413825] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.414054] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.414380] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e31c0154-e378-44ce-b360-15ac035ffb8f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.425031] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.425563] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 991.426138] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed2392cc-85e9-4e98-a9e0-df93214e2a75 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.433178] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 991.433178] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5236779b-11ee-dde7-6a1f-87ac0270db98" [ 991.433178] env[65503]: _type = "Task" [ 991.433178] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.443302] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5236779b-11ee-dde7-6a1f-87ac0270db98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.620078] env[65503]: INFO nova.compute.resource_tracker [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating resource usage from migration dd9bd3db-cf9a-45b6-85df-7df46e78559e [ 991.726362] env[65503]: DEBUG nova.compute.manager [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 991.783336] env[65503]: WARNING openstack [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 991.783959] env[65503]: WARNING openstack [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 991.814115] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c5e506-99d2-4e89-8a16-ae40862ef7c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.822542] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a57046-b581-4f96-8a76-b450098d1f27 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.854165] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9491a6ef-e404-48f2-a9ff-e8e50f1c4e7f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.864679] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9057201-6a33-4b8c-8a3d-934613eea2c1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.882833] env[65503]: WARNING neutronclient.v2_0.client [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
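Editor's note on the recurring "Disabling service 'block-storage'/'key-manager'" warnings: they are raised while openstacksdk processes the service's oslo.config namespace and reads an option that was never registered under the [cinder] and [barbican] groups, so oslo.config raises NoSuchOptError and the SDK disables that service type. A minimal sketch of how oslo.config surfaces that condition (group and option names copied from the log; the registration here is illustrative and is not Nova's or the SDK's actual setup):

    # Illustrative only: reproduces the NoSuchOptError reported in the
    # warnings above by reading an option that was never registered
    # under the [cinder] group.
    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))

    try:
        conf.cinder.valid_interfaces   # option not registered in this group
    except cfg.NoSuchOptError as err:
        # Message matches the log: "no such option valid_interfaces in group [cinder]"
        print(err)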
[ 991.885193] env[65503]: WARNING openstack [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 991.885193] env[65503]: WARNING openstack [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 991.891992] env[65503]: DEBUG nova.compute.provider_tree [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.945030] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5236779b-11ee-dde7-6a1f-87ac0270db98, 'name': SearchDatastore_Task, 'duration_secs': 0.015564} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.945777] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f247a1fc-0737-4b50-9d71-6a45a72a14b0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.952280] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 991.952280] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52721049-5142-988f-02ca-2b3e1046380c" [ 991.952280] env[65503]: _type = "Task" [ 991.952280] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.961241] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52721049-5142-988f-02ca-2b3e1046380c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.255907] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.397261] env[65503]: DEBUG nova.scheduler.client.report [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 992.404073] env[65503]: WARNING neutronclient.v2_0.client [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 992.404073] env[65503]: WARNING openstack [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 992.404073] env[65503]: WARNING openstack [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 992.464604] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52721049-5142-988f-02ca-2b3e1046380c, 'name': SearchDatastore_Task, 'duration_secs': 0.011329} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.464880] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.465593] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 310ee0b5-07ee-4cf0-b262-5e8b473efa3d/310ee0b5-07ee-4cf0-b262-5e8b473efa3d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 992.465900] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06227578-3f02-455d-8d08-2e948d6fc90f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.475140] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 992.475140] env[65503]: value = "task-4450444" [ 992.475140] env[65503]: _type = "Task" [ 992.475140] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.484562] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450444, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.891460] env[65503]: WARNING openstack [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 992.891877] env[65503]: WARNING openstack [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 992.912847] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.306s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.914331] env[65503]: INFO nova.compute.manager [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Migrating [ 992.928673] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.673s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.930527] env[65503]: INFO nova.compute.claims [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 992.988077] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450444, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.148549] env[65503]: DEBUG nova.network.neutron [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Updated VIF entry in instance network info cache for port 4d06e429-4f16-43d7-a443-99c8a67b3e4d. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 993.148962] env[65503]: DEBUG nova.network.neutron [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Updating instance_info_cache with network_info: [{"id": "4d06e429-4f16-43d7-a443-99c8a67b3e4d", "address": "fa:16:3e:cd:a5:b0", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d06e429-4f", "ovs_interfaceid": "4d06e429-4f16-43d7-a443-99c8a67b3e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 993.453962] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.454746] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.455017] env[65503]: DEBUG nova.network.neutron [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 993.487849] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450444, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.679888} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.488191] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 310ee0b5-07ee-4cf0-b262-5e8b473efa3d/310ee0b5-07ee-4cf0-b262-5e8b473efa3d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 993.488427] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 993.488717] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a31dc1c-d59d-4ba9-a082-606a443e28b7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.496870] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 993.496870] env[65503]: value = "task-4450445" [ 993.496870] env[65503]: _type = "Task" [ 993.496870] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.506305] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450445, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.652801] env[65503]: DEBUG oslo_concurrency.lockutils [req-bd63e417-e19b-4c8b-903c-87b33eccda5a req-baf0e5cd-be47-45ff-aba3-96384a818acb service nova] Releasing lock "refresh_cache-310ee0b5-07ee-4cf0-b262-5e8b473efa3d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.779089] env[65503]: WARNING neutronclient.v2_0.client [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 993.780250] env[65503]: WARNING openstack [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 993.780778] env[65503]: WARNING openstack [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 993.958116] env[65503]: DEBUG nova.network.neutron [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Updating instance_info_cache with network_info: [{"id": "6c742bed-a172-46d4-9ac0-cc3025ddff75", "address": "fa:16:3e:52:86:75", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c742bed-a1", "ovs_interfaceid": "6c742bed-a172-46d4-9ac0-cc3025ddff75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 993.960032] env[65503]: WARNING neutronclient.v2_0.client [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 993.960402] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 993.960756] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 994.010023] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450445, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070405} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.013400] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 994.013400] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65adae9b-b197-416d-9705-88b33f122997 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.046055] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 310ee0b5-07ee-4cf0-b262-5e8b473efa3d/310ee0b5-07ee-4cf0-b262-5e8b473efa3d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 994.054068] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59fe713e-6dc7-4bc1-8bf5-fa713c717aed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.078673] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 994.078673] env[65503]: value = "task-4450446" [ 994.078673] env[65503]: _type = "Task" [ 994.078673] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.091510] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450446, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.168850] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 994.170072] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 994.253347] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1278118b-2040-4d35-bd16-9d8e5fa3e734 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.268605] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084d5d04-d9cd-49a2-823e-849855a82b25 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.273416] env[65503]: WARNING neutronclient.v2_0.client [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 994.273416] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 994.273559] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 994.312711] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc057e2-4565-47a0-978e-1bbbfc9b6a49 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.322311] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d53a297-9f0a-44ef-b107-756879c2b54d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.346512] env[65503]: DEBUG nova.compute.provider_tree [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.429041] env[65503]: DEBUG nova.network.neutron [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance_info_cache with network_info: [{"id": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "address": "fa:16:3e:6c:9a:41", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e6ffe6-e1", "ovs_interfaceid": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 994.479219] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d 
tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "refresh_cache-ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.487289] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquiring lock "a043a8e2-8661-4d80-939d-8e7b02b0459f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.488019] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Lock "a043a8e2-8661-4d80-939d-8e7b02b0459f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.489520] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquiring lock "a043a8e2-8661-4d80-939d-8e7b02b0459f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.489520] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Lock "a043a8e2-8661-4d80-939d-8e7b02b0459f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.489520] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Lock "a043a8e2-8661-4d80-939d-8e7b02b0459f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.493411] env[65503]: INFO nova.compute.manager [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Terminating instance [ 994.590036] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450446, 'name': ReconfigVM_Task, 'duration_secs': 0.305575} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.590333] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 310ee0b5-07ee-4cf0-b262-5e8b473efa3d/310ee0b5-07ee-4cf0-b262-5e8b473efa3d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.591052] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5351aec1-419a-4af1-85fb-c1b5b4b310b8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.599073] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 994.599073] env[65503]: value = "task-4450447" [ 994.599073] env[65503]: _type = "Task" [ 994.599073] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.608963] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450447, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.850122] env[65503]: DEBUG nova.scheduler.client.report [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 994.931897] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.001326] env[65503]: DEBUG nova.compute.manager [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 995.001326] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 995.001326] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da82e502-9123-4cd3-b3c6-2864c78b7158 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.009966] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.011877] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c931883d-7ef3-423f-a82b-86b0d8b1819f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.031416] env[65503]: DEBUG oslo_vmware.api [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for the task: (returnval){ [ 995.031416] env[65503]: value = "task-4450448" [ 995.031416] env[65503]: _type = "Task" [ 995.031416] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.045921] env[65503]: DEBUG oslo_vmware.api [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450448, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.114068] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450447, 'name': Rename_Task, 'duration_secs': 0.152915} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.114436] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 995.114764] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fc56e65-ed37-4654-8faa-4895f5c93523 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.124159] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 995.124159] env[65503]: value = "task-4450449" [ 995.124159] env[65503]: _type = "Task" [ 995.124159] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.134010] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450449, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.355320] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.356066] env[65503]: DEBUG nova.compute.manager [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 995.392825] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.393162] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.393562] env[65503]: DEBUG nova.compute.manager [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 995.394764] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff62817-1fbf-46e1-8c7e-6ea305f4ba4b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.404885] env[65503]: DEBUG nova.compute.manager [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 995.405547] env[65503]: DEBUG nova.objects.instance [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'flavor' on Instance uuid 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 {{(pid=65503) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 995.545477] env[65503]: DEBUG oslo_vmware.api [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450448, 'name': PowerOffVM_Task, 'duration_secs': 0.500444} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.546368] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.546646] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.547536] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a35d0e98-5eb4-40b4-945a-daf331f8a503 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.564563] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-995d778e-470e-4cfa-bb9e-b83d9780f51a" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.565256] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-995d778e-470e-4cfa-bb9e-b83d9780f51a" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.633600] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.633954] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.634235] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Deleting the datastore file [datastore1] a043a8e2-8661-4d80-939d-8e7b02b0459f {{(pid=65503) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.641653] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9d0ea52-1a8e-4a68-b9c6-aa9d0a868fc3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.642969] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450449, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.648488] env[65503]: DEBUG oslo_vmware.api [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for the task: (returnval){ [ 995.648488] env[65503]: value = "task-4450451" [ 995.648488] env[65503]: _type = "Task" [ 995.648488] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.658518] env[65503]: DEBUG oslo_vmware.api [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450451, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.862190] env[65503]: DEBUG nova.compute.utils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 995.869310] env[65503]: DEBUG nova.compute.manager [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 995.869310] env[65503]: DEBUG nova.network.neutron [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 995.869310] env[65503]: WARNING neutronclient.v2_0.client [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 995.869310] env[65503]: WARNING neutronclient.v2_0.client [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
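The entries above repeat one oslo.vmware pattern over and over: a vCenter task (Rename_Task, PowerOnVM_Task, DeleteDatastoreFile_Task, ...) is invoked, and wait_for_task then polls it, logging "progress is N%" until it reports "completed successfully" with a duration_secs. Below is a minimal, self-contained sketch of that polling loop; the TaskInfo dataclass, the get_task_info callback, and the poll interval are illustrative stand-ins, not the oslo.vmware implementation.

```python
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    """Illustrative stand-in for the task states visible in the log."""
    state: str     # "running" | "success" | "error"
    progress: int  # 0..100


def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    """Poll a task until it finishes, mirroring the log's
    'Task: {...} progress is N%' / 'completed successfully' lines.

    `get_task_info` is a hypothetical callback returning TaskInfo;
    the real service queries the vCenter API instead.
    """
    started = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info.state == "success":
            duration = time.monotonic() - started
            print(f"Task {task_id} completed successfully "
                  f"(duration_secs={duration:.3f})")
            return
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed")
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Fake task that finishes on the third poll.
    calls = {"n": 0}

    def fake_get_task_info(task_id):
        calls["n"] += 1
        if calls["n"] < 3:
            return TaskInfo("running", calls["n"] * 40)
        return TaskInfo("success", 100)

    wait_for_task(fake_get_task_info, "task-0000001")
```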
[ 995.869310] env[65503]: WARNING openstack [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 995.869727] env[65503]: WARNING openstack [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 995.965137] env[65503]: DEBUG nova.policy [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55db708d2a9b47baa25cafed2be1ba91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '463e93d05e1e4b27a3dc866a5b1991d0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 996.028943] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 996.028943] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92f83168-ae56-4559-9de6-c8e0c865510b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.040829] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 996.040829] env[65503]: value = "task-4450452" [ 996.040829] env[65503]: _type = "Task" [ 996.040829] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.050427] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450452, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.070635] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.070706] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.075936] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97421f5-663b-4c60-9cf9-a2041932cef7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.104904] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef622fa7-862b-4066-89dd-f6bcdc66f691 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.110236] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340e69d7-908a-4a1a-b8b6-5e14fc47325e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.119248] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7b14b9-3c13-4a78-9b81-c8a7d85e7fbd tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Suspending the VM {{(pid=65503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 996.140707] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-6c95d171-91a6-45a5-843b-5ea1ea5e655d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.143048] env[65503]: WARNING neutronclient.v2_0.client [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
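Interleaved with the task polling, the log records oslo.concurrency lock activity from lockutils.py: "Acquiring lock X by Y", "acquired ... waited N s", and later "released ... held N s", usually keyed on an instance UUID or a datastore path. A rough sketch of that usage pattern is below, assuming oslo.concurrency is installed; the function name and lock name are invented for illustration, and the waited/held timing lines in the real log are emitted by lockutils itself, not by user code.

```python
from oslo_concurrency import lockutils


# Hypothetical instance-scoped operation; the lock name mirrors the
# per-instance UUID locks seen in the entries above.
@lockutils.synchronized("2effe3e4-ea22-4d9f-8f5c-38ee661611e3")
def do_stop_instance():
    # The decorated body runs only while the named lock is held;
    # lockutils logs the "Acquiring lock" / "acquired ... waited" /
    # "released ... held" lines around it.
    print("powering off the VM under the instance lock")


if __name__ == "__main__":
    do_stop_instance()
```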
[ 996.151927] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Reconfiguring VM to detach interface {{(pid=65503) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 996.157027] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3fafef4c-aaf6-4355-872b-3e7dec5ace1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.177492] env[65503]: DEBUG oslo_vmware.api [None req-ac7b14b9-3c13-4a78-9b81-c8a7d85e7fbd tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 996.177492] env[65503]: value = "task-4450453" [ 996.177492] env[65503]: _type = "Task" [ 996.177492] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.184335] env[65503]: DEBUG oslo_vmware.api [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450449, 'name': PowerOnVM_Task, 'duration_secs': 0.517718} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.184591] env[65503]: DEBUG oslo_vmware.api [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Task: {'id': task-4450451, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.313323} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.189970] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 996.190182] env[65503]: INFO nova.compute.manager [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Took 9.05 seconds to spawn the instance on the hypervisor. 
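Several request contexts above walk the tail end of a spawn: the VMDK is attached, the VM is renamed (Rename_Task), powered on (PowerOnVM_Task), the power state is re-checked, and the manager reports how many seconds the spawn took. The sketch below compresses that ordering into one function; the VMwareSession protocol and its method names are placeholders for this illustration, not the real interface of the nova.virt.vmwareapi driver.

```python
import time
from typing import Protocol


class VMwareSession(Protocol):
    """Placeholder for whatever object issues vCenter calls."""
    def invoke(self, method: str, **kwargs) -> str: ...
    def wait_for_task(self, task_ref: str) -> None: ...


def finish_spawn(session: VMwareSession, vm_ref: str, display_name: str) -> float:
    """Rename the VM, power it on, and return the elapsed seconds,
    mirroring the Rename_Task -> PowerOnVM_Task -> 'Took N seconds to
    spawn' progression in the log."""
    started = time.monotonic()

    rename_task = session.invoke("Rename_Task", vm=vm_ref, new_name=display_name)
    session.wait_for_task(rename_task)

    power_on_task = session.invoke("PowerOnVM_Task", vm=vm_ref)
    session.wait_for_task(power_on_task)

    return time.monotonic() - started


if __name__ == "__main__":
    class FakeSession:
        def invoke(self, method, **kwargs):
            print(f"Invoking {method} {kwargs}")
            return f"task-for-{method}"

        def wait_for_task(self, task_ref):
            print(f"{task_ref} completed successfully")

    secs = finish_spawn(FakeSession(), "vm-123",
                        "310ee0b5-07ee-4cf0-b262-5e8b473efa3d")
    print(f"Took {secs:.2f} seconds to spawn the instance on the hypervisor.")
```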
[ 996.190395] env[65503]: DEBUG nova.compute.manager [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 996.190724] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.192112] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 996.192112] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 996.192112] env[65503]: INFO nova.compute.manager [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Took 1.19 seconds to destroy the instance on the hypervisor. [ 996.192112] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 996.192112] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 996.192112] env[65503]: value = "task-4450454" [ 996.192112] env[65503]: _type = "Task" [ 996.192112] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.193056] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97ddfc1-b2bb-49ed-93a6-5e8c5f02ff44 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.196436] env[65503]: DEBUG nova.compute.manager [-] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 996.196436] env[65503]: DEBUG nova.network.neutron [-] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 996.196536] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
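The VolumesAdminNegativeTest context above shows the reverse path: the instance is powered off, unregistered, its datastore directory is deleted, "Instance destroyed" is logged, and only then does the compute manager deallocate the Neutron ports, wrapping that call in a retrying looping call (_deallocate_network_with_retries). The sketch below compresses that ordering into one function; the hypervisor and network client objects, the retry count, and the delay are invented for this illustration and do not reflect Nova's actual retry policy.

```python
import time


def destroy_instance(hypervisor, network, instance_id,
                     deallocate_retries=3, retry_delay=1.0):
    """Tear down a VM in the order the log shows: power off,
    unregister, delete datastore contents, then deallocate networking
    with a small retry loop. `hypervisor` and `network` are
    hypothetical client objects used only in this sketch."""
    hypervisor.power_off(instance_id)
    hypervisor.unregister(instance_id)
    hypervisor.delete_datastore_files(instance_id)

    for attempt in range(1, deallocate_retries + 1):
        try:
            network.deallocate_for_instance(instance_id)
            return
        except Exception as exc:  # broad catch is fine for a sketch
            if attempt == deallocate_retries:
                raise
            print(f"deallocate attempt {attempt} failed ({exc}); retrying")
            time.sleep(retry_delay)


if __name__ == "__main__":
    class _Fake:
        def __getattr__(self, name):
            return lambda *args, **kwargs: print(f"{name}{args}")

    destroy_instance(_Fake(), _Fake(),
                     "a043a8e2-8661-4d80-939d-8e7b02b0459f")
```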
[ 996.197089] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 996.197561] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 996.212816] env[65503]: DEBUG oslo_vmware.api [None req-ac7b14b9-3c13-4a78-9b81-c8a7d85e7fbd tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450453, 'name': SuspendVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.224351] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.302375] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 996.382216] env[65503]: DEBUG nova.compute.manager [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 996.414295] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 996.418159] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-716fd8c0-1d4c-4343-aba0-be8e7fb9637f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.429814] env[65503]: DEBUG oslo_vmware.api [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 996.429814] env[65503]: value = "task-4450455" [ 996.429814] env[65503]: _type = "Task" [ 996.429814] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.441713] env[65503]: DEBUG oslo_vmware.api [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450455, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.451388] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc4f4b3-2622-430e-b22e-ec2ac32e0137 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.474978] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance '82415443-1884-4898-996e-828d23f67f23' progress to 0 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 996.554898] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450452, 'name': PowerOffVM_Task, 'duration_secs': 0.456883} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.555105] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 996.556291] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324a0e84-df15-44bd-a5a6-03a131849129 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.580080] env[65503]: DEBUG nova.network.neutron [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Successfully created port: cf5f2d40-016e-4723-bc1b-d56cbe868b91 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 996.585294] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d856bc7-e381-4642-9092-91eb26f043e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.594212] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.594631] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.692769] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 996.693351] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07d0e4f3-183e-4dcf-988e-d920c1ff7c01 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.702075] env[65503]: DEBUG oslo_vmware.api [None req-ac7b14b9-3c13-4a78-9b81-c8a7d85e7fbd tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450453, 'name': SuspendVM_Task} progress is 58%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.704465] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 996.704465] env[65503]: value = "task-4450456" [ 996.704465] env[65503]: _type = "Task" [ 996.704465] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.723866] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.724310] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 996.724573] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.724937] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.725136] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.725345] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 996.730184] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f610268-b705-490b-82a8-b04906139875 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.755289] env[65503]: INFO nova.compute.manager [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Took 14.83 seconds to build instance. [ 996.755289] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 996.755289] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 996.755289] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b3ef15c-c084-4668-873f-f24e21a7ef18 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.755289] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 996.755289] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525c356e-b776-5738-3e65-dc853625846a" [ 996.755289] env[65503]: _type = "Task" [ 996.755289] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.784311] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525c356e-b776-5738-3e65-dc853625846a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.839406] env[65503]: DEBUG oslo_concurrency.lockutils [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.839406] env[65503]: DEBUG oslo_concurrency.lockutils [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.839725] env[65503]: DEBUG oslo_concurrency.lockutils [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.840775] env[65503]: DEBUG oslo_concurrency.lockutils [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.840775] env[65503]: DEBUG oslo_concurrency.lockutils [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.843455] env[65503]: INFO nova.compute.manager [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Terminating instance [ 996.946151] env[65503]: DEBUG oslo_vmware.api [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450455, 'name': PowerOffVM_Task, 'duration_secs': 0.272366} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.946409] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 996.946598] env[65503]: DEBUG nova.compute.manager [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 996.947501] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a31675-32f7-4804-b640-7543c6aa14b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.955411] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "96d8f433-9b86-422f-88ef-99836fb21f30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.955622] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "96d8f433-9b86-422f-88ef-99836fb21f30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.981885] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 996.982338] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b70be8d8-216a-409f-95dd-ea274fa9d172 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.991037] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 996.991037] env[65503]: value = "task-4450457" [ 996.991037] env[65503]: _type = "Task" [ 996.991037] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.000762] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450457, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.101610] env[65503]: DEBUG nova.compute.manager [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 997.197144] env[65503]: DEBUG oslo_vmware.api [None req-ac7b14b9-3c13-4a78-9b81-c8a7d85e7fbd tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450453, 'name': SuspendVM_Task, 'duration_secs': 0.887989} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.197693] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7b14b9-3c13-4a78-9b81-c8a7d85e7fbd tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Suspended the VM {{(pid=65503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 997.197693] env[65503]: DEBUG nova.compute.manager [None req-ac7b14b9-3c13-4a78-9b81-c8a7d85e7fbd tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 997.198483] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29ca4dc-b31c-4973-a057-ea83900b296c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.218550] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.266214] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525c356e-b776-5738-3e65-dc853625846a, 'name': SearchDatastore_Task, 'duration_secs': 0.017788} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.266936] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df312034-c1f5-4e1a-b566-060e9f1494fa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.269907] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fc9a777-101f-4627-ad18-f15545f66456 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.373s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.274700] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 997.274700] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5202fe94-158e-242e-77aa-7bf4c2154479" [ 997.274700] env[65503]: _type = "Task" [ 997.274700] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.285597] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5202fe94-158e-242e-77aa-7bf4c2154479, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.349356] env[65503]: DEBUG nova.compute.manager [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 997.349961] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 997.351697] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d674fbbd-8475-4723-90ff-3a9292deb42d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.360689] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 997.361132] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03a41dc5-a50d-4aca-970c-05ed7452837e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.370187] env[65503]: DEBUG oslo_vmware.api [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 997.370187] env[65503]: value = "task-4450458" [ 997.370187] env[65503]: _type = "Task" [ 997.370187] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.374591] env[65503]: DEBUG nova.network.neutron [-] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 997.383331] env[65503]: DEBUG oslo_vmware.api [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450458, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.396069] env[65503]: DEBUG nova.compute.manager [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 997.427335] env[65503]: DEBUG nova.virt.hardware [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 997.427335] env[65503]: DEBUG nova.virt.hardware [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 997.427591] env[65503]: DEBUG nova.virt.hardware [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 997.427591] env[65503]: DEBUG nova.virt.hardware [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 997.427730] env[65503]: DEBUG nova.virt.hardware [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 997.427902] env[65503]: DEBUG nova.virt.hardware [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 997.428125] env[65503]: DEBUG nova.virt.hardware [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 997.428305] env[65503]: DEBUG nova.virt.hardware [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 997.428498] env[65503]: DEBUG nova.virt.hardware [None 
req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 997.428668] env[65503]: DEBUG nova.virt.hardware [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 997.428840] env[65503]: DEBUG nova.virt.hardware [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 997.429748] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e54a09-1786-44fc-bb87-25c1c66bdccb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.439513] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382da87e-d535-45ac-a3c9-936b04055c6f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.458185] env[65503]: DEBUG nova.compute.manager [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 997.466642] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b9a6c4e8-2014-430c-aa9e-6084e608540c tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.073s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.506090] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450457, 'name': PowerOffVM_Task, 'duration_secs': 0.211384} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.506474] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 997.506600] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance '82415443-1884-4898-996e-828d23f67f23' progress to 17 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 997.631153] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.631537] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.633156] env[65503]: INFO nova.compute.claims [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 997.721758] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.789105] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5202fe94-158e-242e-77aa-7bf4c2154479, 'name': SearchDatastore_Task, 'duration_secs': 0.014611} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.789105] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.789105] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. {{(pid=65503) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 997.789105] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a49ab28-f3f4-404b-b755-9f832403069e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.797846] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 997.797846] env[65503]: value = "task-4450459" [ 997.797846] env[65503]: _type = "Task" [ 997.797846] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.807376] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.878537] env[65503]: INFO nova.compute.manager [-] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Took 1.68 seconds to deallocate network for instance. [ 997.884062] env[65503]: DEBUG oslo_vmware.api [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450458, 'name': PowerOffVM_Task, 'duration_secs': 0.34855} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.884443] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 997.884688] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 997.887642] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4911e799-9825-4693-8818-a7d3b5fec9ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.959037] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 997.959037] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 997.959037] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Deleting the datastore file [datastore2] e4c1c94b-744f-4bed-8e68-3b3f9de7db44 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 997.959037] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca79f499-f988-48dd-86fa-4699bc232d69 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.965851] env[65503]: DEBUG oslo_vmware.api [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 997.965851] env[65503]: value = "task-4450461" [ 997.965851] env[65503]: _type = "Task" [ 997.965851] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.978393] env[65503]: DEBUG oslo_vmware.api [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450461, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.979673] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.015029] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 998.015029] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 998.015029] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 998.015029] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 998.015029] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 998.015029] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 998.015029] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 998.015029] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc 
tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 998.015029] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 998.015029] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 998.015029] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 998.020293] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3886628-3532-47d5-8801-2ede2dacddb1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.037948] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 998.037948] env[65503]: value = "task-4450462" [ 998.037948] env[65503]: _type = "Task" [ 998.037948] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.051500] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450462, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.222442] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.315292] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450459, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.369990] env[65503]: DEBUG nova.network.neutron [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Successfully updated port: cf5f2d40-016e-4723-bc1b-d56cbe868b91 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 998.391136] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.485911] env[65503]: DEBUG oslo_vmware.api [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450461, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280657} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.486272] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 998.486466] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 998.486695] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 998.486940] env[65503]: INFO nova.compute.manager [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Took 1.14 seconds to destroy the instance on the hypervisor. [ 998.487236] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 998.487505] env[65503]: DEBUG nova.compute.manager [-] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 998.487617] env[65503]: DEBUG nova.network.neutron [-] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 998.487934] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 998.488736] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 998.488989] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 998.520346] env[65503]: DEBUG nova.compute.manager [req-c40c90e0-5f06-4ef6-877b-2367f0ff7852 req-89799770-aa84-4d16-934c-4ad820a0852d service nova] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Received event network-vif-deleted-c5073b44-93ec-4c66-9634-09aeaf801426 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 998.539827] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 998.553883] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450462, 'name': ReconfigVM_Task, 'duration_secs': 0.224275} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.554333] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance '82415443-1884-4898-996e-828d23f67f23' progress to 33 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 998.724620] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.815492] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450459, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.751002} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.818516] env[65503]: INFO nova.virt.vmwareapi.ds_util [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. [ 998.819753] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf44746-bcaa-45de-8439-31d81b0f4149 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.847102] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.851476] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15e28e86-e917-41d0-8ddc-159b109c3645 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.875685] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "refresh_cache-9e1b174f-c7cb-45f1-b7c0-b980f32823c8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.875685] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "refresh_cache-9e1b174f-c7cb-45f1-b7c0-b980f32823c8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.875685] env[65503]: DEBUG nova.network.neutron [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 998.878744] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 998.878744] env[65503]: value = "task-4450463" [ 998.878744] env[65503]: _type = "Task" [ 998.878744] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.893015] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450463, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.906458] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f954d9fc-ac77-4978-805e-ccb553dbc407 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.916569] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a0bc28-dda5-482f-927e-21e496a2ddcf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.953114] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87d79d8-68a4-4e66-a083-a17f99cd8908 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.962421] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952c945c-62fd-457c-9932-fd611f6d3075 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.977829] env[65503]: DEBUG nova.compute.provider_tree [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.063211] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 999.064031] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 999.064031] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 999.064031] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 999.064031] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 
tempest-ServerDiskConfigTestJSON-898978153-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 999.064276] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 999.064533] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 999.064949] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 999.064949] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 999.065244] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 999.065329] env[65503]: DEBUG nova.virt.hardware [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 999.070921] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Reconfiguring VM instance instance-00000052 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 999.072290] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb8893ce-ed85-4a7a-96e9-8dfffa868d7f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.094627] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 999.094627] env[65503]: value = "task-4450464" [ 999.094627] env[65503]: _type = "Task" [ 999.094627] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.105793] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450464, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.222914] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.327184] env[65503]: DEBUG nova.compute.manager [req-b861d8e4-c22c-4bb0-803b-42dc953dee67 req-b65412d3-dc98-44cc-bd2e-d148ee340aac service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Received event network-vif-plugged-cf5f2d40-016e-4723-bc1b-d56cbe868b91 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 999.327288] env[65503]: DEBUG oslo_concurrency.lockutils [req-b861d8e4-c22c-4bb0-803b-42dc953dee67 req-b65412d3-dc98-44cc-bd2e-d148ee340aac service nova] Acquiring lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.327490] env[65503]: DEBUG oslo_concurrency.lockutils [req-b861d8e4-c22c-4bb0-803b-42dc953dee67 req-b65412d3-dc98-44cc-bd2e-d148ee340aac service nova] Lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.327683] env[65503]: DEBUG oslo_concurrency.lockutils [req-b861d8e4-c22c-4bb0-803b-42dc953dee67 req-b65412d3-dc98-44cc-bd2e-d148ee340aac service nova] Lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.327848] env[65503]: DEBUG nova.compute.manager [req-b861d8e4-c22c-4bb0-803b-42dc953dee67 req-b65412d3-dc98-44cc-bd2e-d148ee340aac service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] No waiting events found dispatching network-vif-plugged-cf5f2d40-016e-4723-bc1b-d56cbe868b91 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 999.328019] env[65503]: WARNING nova.compute.manager [req-b861d8e4-c22c-4bb0-803b-42dc953dee67 req-b65412d3-dc98-44cc-bd2e-d148ee340aac service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Received unexpected event network-vif-plugged-cf5f2d40-016e-4723-bc1b-d56cbe868b91 for instance with vm_state building and task_state spawning. 
[ 999.378808] env[65503]: WARNING openstack [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 999.379128] env[65503]: WARNING openstack [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 999.396858] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450463, 'name': ReconfigVM_Task, 'duration_secs': 0.360912} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.398890] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Reconfigured VM instance instance-00000055 to attach disk [datastore1] ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.398890] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268fa9ac-a1f6-4a6f-89a8-a73f4ae9853e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.428570] env[65503]: DEBUG nova.network.neutron [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 999.430040] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd86d875-c79a-41f5-9b53-8e68fd70d0f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.448036] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 999.448036] env[65503]: value = "task-4450465" [ 999.448036] env[65503]: _type = "Task" [ 999.448036] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.458349] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450465, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.480902] env[65503]: WARNING openstack [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 999.480902] env[65503]: WARNING openstack [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 999.488092] env[65503]: DEBUG nova.scheduler.client.report [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 999.516552] env[65503]: DEBUG nova.network.neutron [-] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 999.568720] env[65503]: WARNING neutronclient.v2_0.client [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 999.568720] env[65503]: WARNING openstack [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 999.568720] env[65503]: WARNING openstack [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 999.606444] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450464, 'name': ReconfigVM_Task, 'duration_secs': 0.200936} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.607162] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Reconfigured VM instance instance-00000052 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 999.607654] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16ac2fb-ecf2-4bb3-a0a2-3e431cdaab2a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.632659] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 82415443-1884-4898-996e-828d23f67f23/82415443-1884-4898-996e-828d23f67f23.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 999.637669] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae525c35-ee72-435c-b372-ff6d2499ec61 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.658937] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 999.658937] env[65503]: value = "task-4450466" [ 999.658937] env[65503]: _type = "Task" [ 999.658937] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.667363] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450466, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.698427] env[65503]: DEBUG nova.network.neutron [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Updating instance_info_cache with network_info: [{"id": "cf5f2d40-016e-4723-bc1b-d56cbe868b91", "address": "fa:16:3e:48:02:bd", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf5f2d40-01", "ovs_interfaceid": "cf5f2d40-016e-4723-bc1b-d56cbe868b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 999.724107] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.803443] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563691dd-531e-4ca7-8095-20f5b3f856b5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.810966] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b68fbf04-88bd-4cab-88e1-a6badc1e49d1 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Suspending the VM {{(pid=65503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 999.811098] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-65a96535-1385-4835-af21-4a596b81ff4f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.819347] env[65503]: DEBUG oslo_vmware.api [None req-b68fbf04-88bd-4cab-88e1-a6badc1e49d1 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 999.819347] env[65503]: value = "task-4450467" [ 999.819347] env[65503]: _type = "Task" [ 999.819347] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.830158] env[65503]: DEBUG oslo_vmware.api [None req-b68fbf04-88bd-4cab-88e1-a6badc1e49d1 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450467, 'name': SuspendVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.959273] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450465, 'name': ReconfigVM_Task, 'duration_secs': 0.164849} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.959637] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.959937] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33de15d4-62c2-44fb-9597-8e636e1f00bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.968030] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 999.968030] env[65503]: value = "task-4450468" [ 999.968030] env[65503]: _type = "Task" [ 999.968030] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.997162] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.997676] env[65503]: DEBUG nova.compute.manager [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1000.000598] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.021s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.003391] env[65503]: INFO nova.compute.claims [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1000.023224] env[65503]: INFO nova.compute.manager [-] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Took 1.53 seconds to deallocate network for instance. [ 1000.171996] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450466, 'name': ReconfigVM_Task, 'duration_secs': 0.319183} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.173019] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 82415443-1884-4898-996e-828d23f67f23/82415443-1884-4898-996e-828d23f67f23.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1000.173019] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance '82415443-1884-4898-996e-828d23f67f23' progress to 50 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1000.201737] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "refresh_cache-9e1b174f-c7cb-45f1-b7c0-b980f32823c8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.202739] env[65503]: DEBUG nova.compute.manager [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Instance network_info: |[{"id": "cf5f2d40-016e-4723-bc1b-d56cbe868b91", "address": "fa:16:3e:48:02:bd", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf5f2d40-01", "ovs_interfaceid": "cf5f2d40-016e-4723-bc1b-d56cbe868b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1000.202739] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:02:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf5f2d40-016e-4723-bc1b-d56cbe868b91', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1000.212445] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1000.212700] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1000.213170] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f4026b4-1dd8-4896-9a86-419cd91b4768 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.244767] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.246259] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1000.246259] env[65503]: value = "task-4450469" [ 1000.246259] env[65503]: _type = "Task" [ 1000.246259] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.261561] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450469, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.331218] env[65503]: DEBUG oslo_vmware.api [None req-b68fbf04-88bd-4cab-88e1-a6badc1e49d1 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450467, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.356144] env[65503]: INFO nova.compute.manager [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Resuming [ 1000.356144] env[65503]: DEBUG nova.objects.instance [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lazy-loading 'flavor' on Instance uuid ff256d3f-af88-4f01-bdfd-cf89e06ab364 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.481150] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450468, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.508665] env[65503]: DEBUG nova.compute.utils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1000.512264] env[65503]: DEBUG nova.compute.manager [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1000.512264] env[65503]: DEBUG nova.network.neutron [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1000.512462] env[65503]: WARNING neutronclient.v2_0.client [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1000.516021] env[65503]: WARNING neutronclient.v2_0.client [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1000.516021] env[65503]: WARNING openstack [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1000.516021] env[65503]: WARNING openstack [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1000.529942] env[65503]: DEBUG oslo_concurrency.lockutils [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.567157] env[65503]: DEBUG nova.compute.manager [req-d8493649-f432-4a35-b6a8-d9b27b4e2374 req-24412c98-2074-4644-bca8-3292a5fac5ca service nova] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Received event network-vif-deleted-12c029b6-d630-419b-8167-53eb6612a069 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1000.570753] env[65503]: DEBUG nova.policy [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e193d8d730e14c348b38c407f58cdc56', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34e8cd66745a40d2acebbce98050ee5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1000.682443] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14b9243-0a6a-418c-ac45-31b9382a4932 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.705818] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b10c545-536d-46cb-b823-5dab7362616d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.727995] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance '82415443-1884-4898-996e-828d23f67f23' progress to 67 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1000.743790] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 
tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.757519] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450469, 'name': CreateVM_Task, 'duration_secs': 0.512814} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.760360] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.761097] env[65503]: WARNING neutronclient.v2_0.client [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1000.761466] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.761610] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.761965] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1000.763167] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79f90dd1-d039-42a8-a54e-3348b0e7ec1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.769616] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1000.769616] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52695298-193b-b92b-29eb-bd641bee25d6" [ 1000.769616] env[65503]: _type = "Task" [ 1000.769616] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.781911] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52695298-193b-b92b-29eb-bd641bee25d6, 'name': SearchDatastore_Task, 'duration_secs': 0.01021} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.782231] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.782460] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1000.782786] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.782967] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.783222] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.783562] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7daab857-33b8-4326-b9d4-0a02ef1319d2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.793908] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.796552] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1000.796552] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f21e2790-b6c3-4ee8-a347-5c145dac4a2d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.803123] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1000.803123] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520ed09e-5482-9892-e810-ca943d3fe866" [ 1000.803123] env[65503]: _type = "Task" [ 1000.803123] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.805395] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867f847d-5ade-47db-8e8a-be8ae4557a39 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.820809] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b57d24-6109-4e7b-9a7c-1553f29d7110 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.824665] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520ed09e-5482-9892-e810-ca943d3fe866, 'name': SearchDatastore_Task, 'duration_secs': 0.010363} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.828642] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f95c82a7-b031-4251-9e57-f719a90f55bc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.862434] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a304085d-93c9-4872-ba5e-e7233acaaafa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.868038] env[65503]: DEBUG oslo_vmware.api [None req-b68fbf04-88bd-4cab-88e1-a6badc1e49d1 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450467, 'name': SuspendVM_Task, 'duration_secs': 0.655389} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.868414] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1000.868414] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]521237ba-aefa-6837-93aa-a780bfbd618c" [ 1000.868414] env[65503]: _type = "Task" [ 1000.868414] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.871274] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b68fbf04-88bd-4cab-88e1-a6badc1e49d1 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Suspended the VM {{(pid=65503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1000.871274] env[65503]: DEBUG nova.compute.manager [None req-b68fbf04-88bd-4cab-88e1-a6badc1e49d1 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1000.871968] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83c9b81-fef0-4238-aa31-a11293c13d4e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.885374] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7006d3-735a-4314-8938-eac8c7725f56 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.894994] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]521237ba-aefa-6837-93aa-a780bfbd618c, 'name': SearchDatastore_Task, 'duration_secs': 0.013418} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.898156] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.898539] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 9e1b174f-c7cb-45f1-b7c0-b980f32823c8/9e1b174f-c7cb-45f1-b7c0-b980f32823c8.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1000.899392] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1af4886-e3d3-4c5c-97c2-29a97fb995d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.910618] env[65503]: DEBUG nova.compute.provider_tree [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.919352] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 
tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1000.919352] env[65503]: value = "task-4450470" [ 1000.919352] env[65503]: _type = "Task" [ 1000.919352] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.927442] env[65503]: DEBUG nova.network.neutron [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Successfully created port: c1fca361-555c-407f-bd51-6ea779e02f3a {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1000.934354] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450470, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.951372] env[65503]: DEBUG nova.compute.manager [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Stashing vm_state: stopped {{(pid=65503) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1000.985595] env[65503]: DEBUG oslo_vmware.api [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450468, 'name': PowerOnVM_Task, 'duration_secs': 0.572169} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.986052] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1000.990698] env[65503]: DEBUG nova.compute.manager [None req-4ba5aa50-3d23-44a0-91f0-0bcbad99b42d tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1000.991602] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a97dd94-8fb9-472c-a8f2-0a5e8bface1a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.025649] env[65503]: DEBUG nova.compute.manager [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1001.239807] env[65503]: WARNING neutronclient.v2_0.client [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1001.246362] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 18%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.323830] env[65503]: DEBUG nova.network.neutron [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Port d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 1001.415740] env[65503]: DEBUG nova.scheduler.client.report [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1001.434719] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450470, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.473660] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.541861] env[65503]: DEBUG nova.compute.manager [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Received event network-changed-cf5f2d40-016e-4723-bc1b-d56cbe868b91 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1001.541861] env[65503]: DEBUG nova.compute.manager [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Refreshing instance network info cache due to event network-changed-cf5f2d40-016e-4723-bc1b-d56cbe868b91. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1001.541861] env[65503]: DEBUG oslo_concurrency.lockutils [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] Acquiring lock "refresh_cache-9e1b174f-c7cb-45f1-b7c0-b980f32823c8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.541861] env[65503]: DEBUG oslo_concurrency.lockutils [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] Acquired lock "refresh_cache-9e1b174f-c7cb-45f1-b7c0-b980f32823c8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.541861] env[65503]: DEBUG nova.network.neutron [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Refreshing network info cache for port cf5f2d40-016e-4723-bc1b-d56cbe868b91 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1001.740576] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task} progress is 18%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.889024] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.889024] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquired lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.889024] env[65503]: DEBUG nova.network.neutron [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1001.919564] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.919s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.920278] env[65503]: DEBUG nova.compute.manager [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1001.925410] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.534s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.926035] env[65503]: DEBUG nova.objects.instance [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Lazy-loading 'resources' on Instance uuid a043a8e2-8661-4d80-939d-8e7b02b0459f {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.940362] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450470, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.639251} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.941685] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 9e1b174f-c7cb-45f1-b7c0-b980f32823c8/9e1b174f-c7cb-45f1-b7c0-b980f32823c8.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1001.942702] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1001.943386] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea46c9af-a77e-4c19-bec7-4fa03333aabe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.953354] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1001.953354] env[65503]: value = "task-4450471" [ 1001.953354] env[65503]: _type = "Task" [ 1001.953354] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.965333] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450471, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.037990] env[65503]: DEBUG nova.compute.manager [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1002.046491] env[65503]: WARNING neutronclient.v2_0.client [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1002.047177] env[65503]: WARNING openstack [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1002.047533] env[65503]: WARNING openstack [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1002.056241] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.056373] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.069992] env[65503]: DEBUG nova.virt.hardware [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1002.069992] env[65503]: DEBUG nova.virt.hardware [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1002.069992] env[65503]: DEBUG nova.virt.hardware [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1002.069992] env[65503]: DEBUG nova.virt.hardware [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1002.070287] env[65503]: DEBUG nova.virt.hardware [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1002.070334] env[65503]: DEBUG nova.virt.hardware [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1002.070544] env[65503]: DEBUG nova.virt.hardware [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1002.070768] env[65503]: DEBUG nova.virt.hardware [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1002.071053] env[65503]: DEBUG nova.virt.hardware [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1002.071198] env[65503]: DEBUG nova.virt.hardware [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 
1002.071367] env[65503]: DEBUG nova.virt.hardware [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1002.072312] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8726923e-56fb-4262-be2c-03a7f8135824 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.081457] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1556e1-48f9-492e-97c2-70b836877396 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.239860] env[65503]: DEBUG oslo_vmware.api [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450454, 'name': ReconfigVM_Task, 'duration_secs': 5.810781} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.241271] env[65503]: WARNING openstack [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1002.241655] env[65503]: WARNING openstack [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1002.248363] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.248634] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Reconfigured VM to detach interface {{(pid=65503) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1002.249030] env[65503]: WARNING neutronclient.v2_0.client [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
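The nova.virt.hardware entries above walk through choosing a guest CPU topology for the m1.nano flavor: flavor and image express no limits or preferences (0:0:0), the limits therefore default to 65536 sockets/cores/threads, and for a single vCPU the only topology whose dimensions multiply out to the vCPU count is 1 socket x 1 core x 1 thread, which becomes the sorted result. The snippet below is a simplified, hypothetical enumeration of that idea, not Nova's actual _get_possible_cpu_topologies (which also honours the threads policy and NUMA constraints).

# Simplified illustration of enumerating guest CPU topologies for a vCPU
# count under sockets/cores/threads limits, in the spirit of the
# nova.virt.hardware entries above. Hypothetical helper, not Nova's code.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every topology whose sockets*cores*threads equals vcpus."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching
#    "Got 1 possible topologies" and the sorted result in the log.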
[ 1002.249338] env[65503]: WARNING neutronclient.v2_0.client [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1002.249865] env[65503]: WARNING openstack [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1002.250206] env[65503]: WARNING openstack [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1002.349518] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "82415443-1884-4898-996e-828d23f67f23-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.349706] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "82415443-1884-4898-996e-828d23f67f23-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.349875] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "82415443-1884-4898-996e-828d23f67f23-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.367268] env[65503]: WARNING neutronclient.v2_0.client [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
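The recurring "Disabling service 'block-storage'" and "Disabling service 'key-manager'" warnings come from openstacksdk reading valid_interfaces out of the [cinder] and [barbican] groups of a config object that never registered that option; oslo.config answers with NoSuchOptError, and the SDK skips those services rather than failing. Below is a minimal reproduction of just the oslo.config side of that behaviour; the group and option names mirror the log, and the snippet is illustrative rather than the SDK's actual code path.

# Minimal reproduction of the NoSuchOptError behind the
# "Disabling service 'block-storage'" warnings above: asking oslo.config
# for an option that was never registered in a group raises the same
# error that openstacksdk catches before disabling the service.
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup("cinder"))   # group exists...
conf([])                                      # ...but 'valid_interfaces' was never registered

try:
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    # prints the same text quoted in the warnings above:
    # "no such option valid_interfaces in group [cinder]"
    print(f"NoSuchOptError: {exc}")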
[ 1002.367696] env[65503]: WARNING openstack [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1002.368057] env[65503]: WARNING openstack [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1002.391632] env[65503]: WARNING neutronclient.v2_0.client [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1002.392280] env[65503]: WARNING openstack [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1002.392620] env[65503]: WARNING openstack [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1002.429992] env[65503]: DEBUG nova.compute.utils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1002.432212] env[65503]: DEBUG nova.compute.manager [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1002.432451] env[65503]: DEBUG nova.network.neutron [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1002.432792] env[65503]: WARNING neutronclient.v2_0.client [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
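Throughout this stretch the compute manager overlaps work: "Allocating IP information in the background" hands the Neutron allocate_for_instance() call to _allocate_network_async while block device mappings and the guest spec keep being built, and the result is only awaited once the spawn actually needs the VIFs. The sketch below shows that overlap with plain threads; Nova itself does this with eventlet green threads (see the oslo.service _eventlet/loopingcall paths earlier in the log), and both helpers here are hypothetical stand-ins, not Nova code.

# Simplified sketch of overlapping Neutron port allocation with the rest
# of a server build, in the spirit of the "_allocate_network_async"
# entries above. Nova uses eventlet green threads for this; the
# ThreadPoolExecutor and both helpers here are illustrative stand-ins.
import time
from concurrent.futures import ThreadPoolExecutor

def allocate_for_instance(instance_uuid):
    """Stand-in for the Neutron calls that create and bind the port."""
    time.sleep(0.5)                            # pretend the Neutron round trips take a while
    return [{"port_id": "<allocated-port>", "fixed_ip": "<allocated-ip>"}]

def build_block_device_mappings(instance_uuid):
    """Stand-in for the BDM work that can proceed without network info."""
    return [{"device_name": "/dev/sda", "boot_index": 0}]

def build_instance(instance_uuid):
    with ThreadPoolExecutor(max_workers=1) as pool:
        # "Allocating IP information in the background."
        network_future = pool.submit(allocate_for_instance, instance_uuid)
        # "Start building block device mappings for instance."
        bdms = build_block_device_mappings(instance_uuid)
        # Block on the network info only when the spawn actually needs it.
        network_info = network_future.result(timeout=60)
    return bdms, network_info

print(build_instance("fcdcabb9-f076-4fa9-ac30-3220eb6064da"))

The network-vif-plugged-c1fca361-555c-407f-bd51-6ea779e02f3a events received a moment later in the log are Neutron notifying the compute manager once that asynchronously allocated port is actually wired up on the backend.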
[ 1002.433122] env[65503]: WARNING neutronclient.v2_0.client [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1002.433728] env[65503]: WARNING openstack [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1002.434100] env[65503]: WARNING openstack [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1002.465432] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450471, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070673} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.465951] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1002.466910] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1faeac24-c2a9-41a9-bbb7-5a6a292aa543 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.500987] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 9e1b174f-c7cb-45f1-b7c0-b980f32823c8/9e1b174f-c7cb-45f1-b7c0-b980f32823c8.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1002.504697] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb0e9d61-7c6e-401e-a459-1da332548c22 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.528321] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1002.528321] env[65503]: value = "task-4450472" [ 1002.528321] env[65503]: _type = "Task" [ 1002.528321] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.538814] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450472, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.547610] env[65503]: DEBUG nova.network.neutron [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Successfully updated port: c1fca361-555c-407f-bd51-6ea779e02f3a {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1002.564407] env[65503]: DEBUG nova.compute.manager [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1002.698110] env[65503]: DEBUG nova.policy [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9afe8731bc284b6881835aa0a8f6c725', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0dbf0e9b08b741e88f971ec5f54dede8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1002.702214] env[65503]: DEBUG nova.network.neutron [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Updated VIF entry in instance network info cache for port cf5f2d40-016e-4723-bc1b-d56cbe868b91. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1002.702572] env[65503]: DEBUG nova.network.neutron [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Updating instance_info_cache with network_info: [{"id": "cf5f2d40-016e-4723-bc1b-d56cbe868b91", "address": "fa:16:3e:48:02:bd", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf5f2d40-01", "ovs_interfaceid": "cf5f2d40-016e-4723-bc1b-d56cbe868b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1002.811630] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5acbec47-5195-485e-9f1c-4eb110ddc563 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.821738] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22223896-91e0-4f53-b4a4-47a202592930 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.887034] env[65503]: WARNING openstack [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1002.887430] env[65503]: WARNING openstack [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1002.895239] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472bfa40-be97-4a3f-a51d-80eee2a7e05a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.906706] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4eb4ac-af58-4d01-bfe7-5f1ebd8f2fe8 {{(pid=65503) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.923632] env[65503]: DEBUG nova.compute.provider_tree [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.927325] env[65503]: DEBUG nova.compute.manager [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Received event network-vif-plugged-c1fca361-555c-407f-bd51-6ea779e02f3a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1002.927596] env[65503]: DEBUG oslo_concurrency.lockutils [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] Acquiring lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.927816] env[65503]: DEBUG oslo_concurrency.lockutils [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.928058] env[65503]: DEBUG oslo_concurrency.lockutils [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.928338] env[65503]: DEBUG nova.compute.manager [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] No waiting events found dispatching network-vif-plugged-c1fca361-555c-407f-bd51-6ea779e02f3a {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1002.928594] env[65503]: WARNING nova.compute.manager [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Received unexpected event network-vif-plugged-c1fca361-555c-407f-bd51-6ea779e02f3a for instance with vm_state building and task_state spawning. [ 1002.928842] env[65503]: DEBUG nova.compute.manager [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Received event network-changed-c1fca361-555c-407f-bd51-6ea779e02f3a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1002.928924] env[65503]: DEBUG nova.compute.manager [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Refreshing instance network info cache due to event network-changed-c1fca361-555c-407f-bd51-6ea779e02f3a. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1002.929606] env[65503]: DEBUG oslo_concurrency.lockutils [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] Acquiring lock "refresh_cache-fcdcabb9-f076-4fa9-ac30-3220eb6064da" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.929606] env[65503]: DEBUG oslo_concurrency.lockutils [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] Acquired lock "refresh_cache-fcdcabb9-f076-4fa9-ac30-3220eb6064da" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.929606] env[65503]: DEBUG nova.network.neutron [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Refreshing network info cache for port c1fca361-555c-407f-bd51-6ea779e02f3a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1002.942804] env[65503]: DEBUG nova.compute.manager [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1002.981924] env[65503]: WARNING neutronclient.v2_0.client [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1002.982644] env[65503]: WARNING openstack [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1002.982953] env[65503]: WARNING openstack [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1003.039255] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450472, 'name': ReconfigVM_Task, 'duration_secs': 0.448661} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.044152] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 9e1b174f-c7cb-45f1-b7c0-b980f32823c8/9e1b174f-c7cb-45f1-b7c0-b980f32823c8.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.045055] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45c0047d-35e1-485b-a918-b612bd39a8bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.050828] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "refresh_cache-fcdcabb9-f076-4fa9-ac30-3220eb6064da" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.054060] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1003.054060] env[65503]: value = "task-4450473" [ 1003.054060] env[65503]: _type = "Task" [ 1003.054060] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.067138] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450473, 'name': Rename_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.089268] env[65503]: DEBUG nova.network.neutron [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Successfully created port: 4089f68d-952e-4b5f-8578-0ce2a0afb9f1 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1003.097676] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.114271] env[65503]: DEBUG nova.network.neutron [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updating instance_info_cache with network_info: [{"id": "73e98445-c951-4dc2-82e3-537e2196f82a", "address": "fa:16:3e:d2:11:71", "network": {"id": "1ed26744-3e1a-4978-ae1a-76469b646e4a", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472231669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5a588e741704449878e7a03d7892d11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73e98445-c9", "ovs_interfaceid": "73e98445-c951-4dc2-82e3-537e2196f82a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1003.205395] env[65503]: DEBUG oslo_concurrency.lockutils [req-40d06baf-c476-43e6-93d8-662acec185c7 req-a67892fa-69fb-4da5-9b3a-7fbc3cd362e4 service nova] Releasing lock "refresh_cache-9e1b174f-c7cb-45f1-b7c0-b980f32823c8" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.396664] env[65503]: WARNING neutronclient.v2_0.client [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1003.434185] env[65503]: DEBUG nova.scheduler.client.report [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1003.438598] env[65503]: WARNING neutronclient.v2_0.client [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1003.439304] env[65503]: WARNING openstack [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1003.439664] env[65503]: WARNING openstack [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1003.574346] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450473, 'name': Rename_Task, 'duration_secs': 0.28946} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.574644] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1003.575027] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83e19950-6710-424b-818e-cae2759a44a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.585129] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1003.585129] env[65503]: value = "task-4450474" [ 1003.585129] env[65503]: _type = "Task" [ 1003.585129] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.597991] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450474, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.605760] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.606036] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.606151] env[65503]: DEBUG nova.network.neutron [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1003.618489] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Releasing lock "refresh_cache-ff256d3f-af88-4f01-bdfd-cf89e06ab364" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.619466] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4262fba-b6e0-4467-95a3-29a34fa4b84e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.628354] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Resuming the VM {{(pid=65503) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1003.628634] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fdaf4227-baae-4c34-949f-7fb50c5c4313 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.636057] env[65503]: DEBUG nova.network.neutron [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1003.639760] env[65503]: DEBUG oslo_vmware.api [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 1003.639760] env[65503]: value = "task-4450475" [ 1003.639760] env[65503]: _type = "Task" [ 1003.639760] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.640252] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.640392] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.640632] env[65503]: DEBUG nova.network.neutron [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1003.658925] env[65503]: DEBUG oslo_vmware.api [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450475, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.681727] env[65503]: DEBUG nova.compute.manager [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1003.683191] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbd25d5-dac4-497c-a091-999a72e09d57 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.702907] env[65503]: DEBUG nova.compute.manager [req-c01d7117-6be0-4346-b414-8cb1d92b8566 req-3ca17e2c-304e-4dd5-a52f-9f519e4d0d8a service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received event network-vif-deleted-995d778e-470e-4cfa-bb9e-b83d9780f51a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1003.703064] env[65503]: INFO nova.compute.manager [req-c01d7117-6be0-4346-b414-8cb1d92b8566 req-3ca17e2c-304e-4dd5-a52f-9f519e4d0d8a service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Neutron deleted interface 995d778e-470e-4cfa-bb9e-b83d9780f51a; detaching it from the instance and deleting it from the info cache [ 1003.703346] env[65503]: DEBUG nova.network.neutron [req-c01d7117-6be0-4346-b414-8cb1d92b8566 req-3ca17e2c-304e-4dd5-a52f-9f519e4d0d8a service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updating instance_info_cache with network_info: [{"id": "a52973b6-14a8-480e-8ad4-92719252801c", "address": "fa:16:3e:73:61:b2", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa52973b6-14", "ovs_interfaceid": "a52973b6-14a8-480e-8ad4-92719252801c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e4ff15a9-ecda-42a3-8724-0d6cab24c894", "address": "fa:16:3e:d4:2a:47", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4ff15a9-ec", "ovs_interfaceid": "e4ff15a9-ecda-42a3-8724-0d6cab24c894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1003.840270] env[65503]: DEBUG nova.network.neutron [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1003.842708] env[65503]: INFO nova.compute.manager [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Rescuing [ 1003.842708] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.843042] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.843235] 
env[65503]: DEBUG nova.network.neutron [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1003.949403] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.024s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.954036] env[65503]: DEBUG oslo_concurrency.lockutils [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.424s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.954398] env[65503]: DEBUG nova.objects.instance [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'resources' on Instance uuid e4c1c94b-744f-4bed-8e68-3b3f9de7db44 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.958760] env[65503]: DEBUG nova.compute.manager [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1003.987848] env[65503]: INFO nova.scheduler.client.report [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Deleted allocations for instance a043a8e2-8661-4d80-939d-8e7b02b0459f [ 1004.009736] env[65503]: DEBUG nova.virt.hardware [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1004.009736] env[65503]: DEBUG nova.virt.hardware [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1004.009736] env[65503]: DEBUG nova.virt.hardware [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1004.010056] env[65503]: DEBUG nova.virt.hardware [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1004.010099] env[65503]: DEBUG nova.virt.hardware [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1004.011132] env[65503]: DEBUG nova.virt.hardware [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1004.011132] env[65503]: DEBUG nova.virt.hardware [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1004.011132] env[65503]: DEBUG nova.virt.hardware [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc 
tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1004.011132] env[65503]: DEBUG nova.virt.hardware [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1004.011132] env[65503]: DEBUG nova.virt.hardware [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1004.011915] env[65503]: DEBUG nova.virt.hardware [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1004.012610] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a7e5ad-dfeb-4a5b-a472-39f793e0ad9b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.032642] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbd3daa-71ab-43e6-b4c5-b95b6bc304d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.097209] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450474, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.109161] env[65503]: WARNING neutronclient.v2_0.client [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1004.110020] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1004.110510] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1004.156978] env[65503]: WARNING neutronclient.v2_0.client [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1004.157724] env[65503]: WARNING openstack [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1004.158026] env[65503]: WARNING openstack [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1004.168150] env[65503]: DEBUG oslo_vmware.api [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450475, 'name': PowerOnVM_Task} progress is 93%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.171885] env[65503]: DEBUG oslo_concurrency.lockutils [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.172229] env[65503]: DEBUG oslo_concurrency.lockutils [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.172505] env[65503]: DEBUG oslo_concurrency.lockutils [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.172770] env[65503]: DEBUG oslo_concurrency.lockutils [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.172966] env[65503]: DEBUG oslo_concurrency.lockutils [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.175496] env[65503]: INFO nova.compute.manager [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Terminating instance [ 1004.198175] env[65503]: INFO nova.compute.manager [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] instance snapshotting [ 1004.198524] env[65503]: WARNING nova.compute.manager [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1004.201714] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d1252a-195b-43d4-98d0-62fbaeea9aa8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.207847] 
env[65503]: DEBUG oslo_concurrency.lockutils [req-c01d7117-6be0-4346-b414-8cb1d92b8566 req-3ca17e2c-304e-4dd5-a52f-9f519e4d0d8a service nova] Acquiring lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.233482] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2744f67a-e1b3-42e9-b0c5-328f92b39b82 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.347345] env[65503]: DEBUG oslo_concurrency.lockutils [req-126041b2-93b4-42e1-9257-5a760d0b5731 req-f0798f5b-1479-4805-950b-eb9f333cf678 service nova] Releasing lock "refresh_cache-fcdcabb9-f076-4fa9-ac30-3220eb6064da" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.348146] env[65503]: WARNING neutronclient.v2_0.client [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1004.348955] env[65503]: WARNING openstack [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1004.349351] env[65503]: WARNING openstack [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1004.358098] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "refresh_cache-fcdcabb9-f076-4fa9-ac30-3220eb6064da" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.358367] env[65503]: DEBUG nova.network.neutron [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1004.471025] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1004.471315] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc 
tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1004.496977] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2f8a2d31-290a-4380-8d70-0e8065c5b2f1 tempest-VolumesAdminNegativeTest-280625136 tempest-VolumesAdminNegativeTest-280625136-project-member] Lock "a043a8e2-8661-4d80-939d-8e7b02b0459f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.009s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.558268] env[65503]: WARNING openstack [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1004.558962] env[65503]: WARNING openstack [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1004.605100] env[65503]: DEBUG oslo_vmware.api [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450474, 'name': PowerOnVM_Task, 'duration_secs': 0.715401} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.610228] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1004.610228] env[65503]: INFO nova.compute.manager [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Took 7.21 seconds to spawn the instance on the hypervisor. 
[ 1004.610619] env[65503]: DEBUG nova.compute.manager [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1004.615552] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c39fa1-3ebb-43b6-855f-bcfce27ffdca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.659715] env[65503]: DEBUG oslo_vmware.api [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450475, 'name': PowerOnVM_Task, 'duration_secs': 0.692904} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.659898] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Resumed the VM {{(pid=65503) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1004.660037] env[65503]: DEBUG nova.compute.manager [None req-dad377c8-94c6-4756-b7fe-3e4be51aa7e3 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1004.661075] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2717b0f0-23e6-4c3b-a8d8-b2c4dab117ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.683210] env[65503]: DEBUG nova.compute.manager [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1004.683210] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1004.683210] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9af4ab-13de-44e8-90aa-952cd260268e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.694080] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1004.694080] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0589ff6d-e893-44a3-82d6-ce1dbe97185c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.704130] env[65503]: WARNING neutronclient.v2_0.client [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1004.704970] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1004.705394] env[65503]: WARNING openstack [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1004.716770] env[65503]: DEBUG nova.network.neutron [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Successfully updated port: 4089f68d-952e-4b5f-8578-0ce2a0afb9f1 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1004.720832] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8670da74-8076-4f7d-b311-66450974b958 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.726486] env[65503]: DEBUG oslo_vmware.api [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1004.726486] env[65503]: 
value = "task-4450476" [ 1004.726486] env[65503]: _type = "Task" [ 1004.726486] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.735136] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb9cbca-1a04-4691-8047-ca7f7de6f70b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.742908] env[65503]: DEBUG oslo_vmware.api [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450476, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.773051] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1004.775086] env[65503]: WARNING openstack [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1004.775522] env[65503]: WARNING openstack [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1004.782304] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b10837db-e9d6-4683-a200-76be082eed2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.784842] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0f1d83-72b7-4eb4-a7cc-308e9933475e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.793208] env[65503]: WARNING neutronclient.v2_0.client [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1004.793208] env[65503]: WARNING openstack [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1004.793208] env[65503]: WARNING openstack [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1004.816033] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1853c5-4589-4bb7-a45e-060d82fba46e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.819663] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1004.819663] env[65503]: value = "task-4450477" [ 1004.819663] env[65503]: _type = "Task" [ 1004.819663] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.832782] env[65503]: DEBUG nova.compute.provider_tree [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.844320] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450477, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.862786] env[65503]: WARNING openstack [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1004.863148] env[65503]: WARNING openstack [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1004.873378] env[65503]: DEBUG nova.network.neutron [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance_info_cache with network_info: [{"id": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "address": "fa:16:3e:6c:9a:41", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e6ffe6-e1", "ovs_interfaceid": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1004.899222] env[65503]: WARNING neutronclient.v2_0.client [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1004.899926] env[65503]: WARNING openstack [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1004.900454] env[65503]: WARNING openstack [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1004.923447] env[65503]: DEBUG nova.network.neutron [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1004.940306] env[65503]: INFO nova.network.neutron [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Port 995d778e-470e-4cfa-bb9e-b83d9780f51a from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1004.940560] env[65503]: INFO nova.network.neutron [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Port e4ff15a9-ecda-42a3-8724-0d6cab24c894 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1004.940935] env[65503]: DEBUG nova.network.neutron [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updating instance_info_cache with network_info: [{"id": "a52973b6-14a8-480e-8ad4-92719252801c", "address": "fa:16:3e:73:61:b2", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa52973b6-14", "ovs_interfaceid": "a52973b6-14a8-480e-8ad4-92719252801c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1004.969680] env[65503]: DEBUG nova.compute.manager [req-778b22ae-90fa-4f5d-98b1-fe50dd6b9091 req-09325aea-02e2-4a40-aed2-a0b70b3710cf service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received event network-vif-deleted-e4ff15a9-ecda-42a3-8724-0d6cab24c894 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1004.992156] env[65503]: WARNING openstack [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1004.992571] env[65503]: WARNING openstack [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1005.032986] env[65503]: DEBUG nova.network.neutron [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Updating instance_info_cache with network_info: [{"id": "21904121-6624-489f-b851-76b0dfc15641", "address": "fa:16:3e:cd:01:68", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21904121-66", "ovs_interfaceid": "21904121-6624-489f-b851-76b0dfc15641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1005.063759] env[65503]: WARNING neutronclient.v2_0.client [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1005.064479] env[65503]: WARNING openstack [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1005.064896] env[65503]: WARNING openstack [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1005.147888] env[65503]: INFO nova.compute.manager [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Took 12.91 seconds to build instance. 
[ 1005.180556] env[65503]: DEBUG nova.network.neutron [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Updating instance_info_cache with network_info: [{"id": "c1fca361-555c-407f-bd51-6ea779e02f3a", "address": "fa:16:3e:ce:a1:55", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1fca361-55", "ovs_interfaceid": "c1fca361-555c-407f-bd51-6ea779e02f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1005.228281] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.228521] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquired lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.228645] env[65503]: DEBUG nova.network.neutron [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1005.246468] env[65503]: DEBUG oslo_vmware.api [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450476, 'name': PowerOffVM_Task, 'duration_secs': 0.259756} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.246838] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.247034] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.247299] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea56081d-3407-4c5a-939b-49c462219f77 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.329420] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450477, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.340668] env[65503]: DEBUG nova.scheduler.client.report [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1005.379591] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.390480] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.390755] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.390900] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] 
Deleting the datastore file [datastore2] 2a1587cd-8b47-439f-948c-d58a5dc8220e {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.391214] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da39baaa-bbbf-40bb-978c-8dd9a7f6e627 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.399248] env[65503]: DEBUG oslo_vmware.api [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1005.399248] env[65503]: value = "task-4450479" [ 1005.399248] env[65503]: _type = "Task" [ 1005.399248] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.409954] env[65503]: DEBUG oslo_vmware.api [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450479, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.443866] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.536330] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.652682] env[65503]: DEBUG oslo_concurrency.lockutils [None req-458ce603-b3eb-4499-bdec-b70d253f7b3a tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.432s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.684210] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "refresh_cache-fcdcabb9-f076-4fa9-ac30-3220eb6064da" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.684597] env[65503]: DEBUG nova.compute.manager [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Instance network_info: |[{"id": "c1fca361-555c-407f-bd51-6ea779e02f3a", "address": "fa:16:3e:ce:a1:55", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1fca361-55", "ovs_interfaceid": "c1fca361-555c-407f-bd51-6ea779e02f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1005.685142] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:a1:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1fca361-555c-407f-bd51-6ea779e02f3a', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1005.693935] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1005.694641] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1005.695014] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f2585f3-26ea-47d1-858a-5512280c171d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.720609] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1005.720609] env[65503]: value = "task-4450480" [ 1005.720609] env[65503]: _type = "Task" [ 1005.720609] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.733143] env[65503]: WARNING openstack [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1005.733143] env[65503]: WARNING openstack [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1005.739956] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450480, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.755942] env[65503]: DEBUG nova.compute.manager [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Received event network-vif-plugged-4089f68d-952e-4b5f-8578-0ce2a0afb9f1 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1005.758517] env[65503]: DEBUG oslo_concurrency.lockutils [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Acquiring lock "96d8f433-9b86-422f-88ef-99836fb21f30-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.758816] env[65503]: DEBUG oslo_concurrency.lockutils [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Lock "96d8f433-9b86-422f-88ef-99836fb21f30-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.759021] env[65503]: DEBUG oslo_concurrency.lockutils [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Lock "96d8f433-9b86-422f-88ef-99836fb21f30-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.759203] env[65503]: DEBUG nova.compute.manager [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] No waiting events found dispatching network-vif-plugged-4089f68d-952e-4b5f-8578-0ce2a0afb9f1 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1005.759403] env[65503]: WARNING nova.compute.manager [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Received unexpected event network-vif-plugged-4089f68d-952e-4b5f-8578-0ce2a0afb9f1 for instance with vm_state building and task_state spawning. 
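The 'Waiting for the task: (returnval){ value = "task-..." ... } to complete' blocks and the subsequent 'progress is N%' polling come from oslo.vmware's task handling. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named session and a VirtualMachine managed-object reference vm_ref (both placeholders here), and not the nova.virt.vmwareapi code itself:

    def power_off(session, vm_ref):
        # invoke_api() submits the vSphere task and returns its reference;
        # wait_for_task() polls it (the "_poll_task ... progress is N%"
        # lines) until completion, returning the final task info or raising
        # on failure.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)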
[ 1005.759573] env[65503]: DEBUG nova.compute.manager [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Received event network-changed-4089f68d-952e-4b5f-8578-0ce2a0afb9f1 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1005.759745] env[65503]: DEBUG nova.compute.manager [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Refreshing instance network info cache due to event network-changed-4089f68d-952e-4b5f-8578-0ce2a0afb9f1. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1005.759925] env[65503]: DEBUG oslo_concurrency.lockutils [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Acquiring lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.775605] env[65503]: DEBUG nova.network.neutron [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1005.831190] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450477, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.832658] env[65503]: WARNING openstack [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1005.833113] env[65503]: WARNING openstack [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1005.845963] env[65503]: DEBUG oslo_concurrency.lockutils [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.892s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.848767] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.376s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.875941] env[65503]: INFO nova.scheduler.client.report [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Deleted allocations for instance e4c1c94b-744f-4bed-8e68-3b3f9de7db44 [ 1005.906241] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f643471-affc-4803-85da-d0c5b60354f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.923353] env[65503]: DEBUG oslo_vmware.api [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450479, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.939013] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93edc09a-ff9b-44ce-a86b-384bdbcbd39a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.943042] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4e9ced-860c-4683-a2db-c1e4b7492878 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.947765] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9e4bf792-0935-49a1-a62f-b6508c432f87 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-2a1587cd-8b47-439f-948c-d58a5dc8220e-995d778e-470e-4cfa-bb9e-b83d9780f51a" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.383s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.954095] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-01af3d24-6d65-4821-989c-0a0fd25372d9 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Suspending the VM {{(pid=65503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1005.955140] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance '82415443-1884-4898-996e-828d23f67f23' progress to 83 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1005.959755] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-84f65bbc-8b66-4377-82f3-1aba31111466 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.969195] env[65503]: DEBUG oslo_vmware.api [None req-01af3d24-6d65-4821-989c-0a0fd25372d9 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1005.969195] env[65503]: value = "task-4450481" [ 1005.969195] env[65503]: _type = "Task" [ 1005.969195] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.979919] env[65503]: DEBUG oslo_vmware.api [None req-01af3d24-6d65-4821-989c-0a0fd25372d9 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450481, 'name': SuspendVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.037391] env[65503]: WARNING neutronclient.v2_0.client [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1006.038285] env[65503]: WARNING openstack [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1006.038867] env[65503]: WARNING openstack [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1006.233343] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450480, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.335191] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450477, 'name': CreateSnapshot_Task, 'duration_secs': 1.365719} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.335659] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1006.337147] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd1238a-490e-456c-b565-525f34c00d9c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.357119] env[65503]: DEBUG nova.network.neutron [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Updating instance_info_cache with network_info: [{"id": "4089f68d-952e-4b5f-8578-0ce2a0afb9f1", "address": "fa:16:3e:d8:f1:dd", "network": {"id": "e65ab27f-25ec-415d-b0f1-4cee1c4a6a2e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1962350376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dbf0e9b08b741e88f971ec5f54dede8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4089f68d-95", "ovs_interfaceid": "4089f68d-952e-4b5f-8578-0ce2a0afb9f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1006.362346] env[65503]: INFO nova.compute.claims [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1006.390948] env[65503]: DEBUG oslo_concurrency.lockutils [None req-035dedb3-4bf6-4b56-ba35-519c0d1dee03 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "e4c1c94b-744f-4bed-8e68-3b3f9de7db44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.551s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.424857] env[65503]: DEBUG oslo_vmware.api [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.607136} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.427519] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1006.427519] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1006.427519] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1006.427519] env[65503]: INFO nova.compute.manager [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Took 1.75 seconds to destroy the instance on the hypervisor. [ 1006.427519] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1006.427519] env[65503]: DEBUG nova.compute.manager [-] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1006.427519] env[65503]: DEBUG nova.network.neutron [-] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1006.427519] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
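The neutronclient deprecation warnings repeated through this run point to openstacksdk as the replacement for calls such as show_port(); the PortNotFound handling a few lines below ("Unable to show port e4ff15a9-... as it no longer exists") is exactly the case being probed. A minimal, hedged SDK equivalent; the cloud name is an assumption standing in for whatever clouds.yaml defines:

    import openstack

    conn = openstack.connect(cloud='devstack')  # cloud name is illustrative
    # find_port() returns None when the port no longer exists, which is the
    # situation handled below for e4ff15a9-ecda-42a3-8724-0d6cab24c894;
    # get_port() would instead raise a not-found error.
    port = conn.network.find_port('e4ff15a9-ecda-42a3-8724-0d6cab24c894')
    if port is None:
        print('port gone, nothing to unbind')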
[ 1006.429390] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1006.429390] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1006.464930] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.465253] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b8cecbf-975a-49d9-bf9e-3a8091fe8114 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.475012] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1006.475012] env[65503]: value = "task-4450482" [ 1006.475012] env[65503]: _type = "Task" [ 1006.475012] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.483560] env[65503]: DEBUG oslo_vmware.api [None req-01af3d24-6d65-4821-989c-0a0fd25372d9 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450481, 'name': SuspendVM_Task} progress is 62%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.486909] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450482, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.504029] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1006.564813] env[65503]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port e4ff15a9-ecda-42a3-8724-0d6cab24c894 could not be found.", "detail": ""}} {{(pid=65503) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:265}} [ 1006.564813] env[65503]: DEBUG nova.network.neutron [-] Unable to show port e4ff15a9-ecda-42a3-8724-0d6cab24c894 as it no longer exists. {{(pid=65503) _unbind_ports /opt/stack/nova/nova/network/neutron.py:700}} [ 1006.733416] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450480, 'name': CreateVM_Task, 'duration_secs': 0.513592} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.733416] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1006.734050] env[65503]: WARNING neutronclient.v2_0.client [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1006.734244] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.735171] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.735171] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1006.735171] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fa9859b-4dd9-406c-b984-fc5ba407485e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.741127] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1006.741127] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52687571-5ff8-e911-4c88-5c9ac9d538c1" [ 1006.741127] env[65503]: _type = "Task" [ 1006.741127] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.750537] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52687571-5ff8-e911-4c88-5c9ac9d538c1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.871326] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Releasing lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.871326] env[65503]: DEBUG nova.compute.manager [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Instance network_info: |[{"id": "4089f68d-952e-4b5f-8578-0ce2a0afb9f1", "address": "fa:16:3e:d8:f1:dd", "network": {"id": "e65ab27f-25ec-415d-b0f1-4cee1c4a6a2e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1962350376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dbf0e9b08b741e88f971ec5f54dede8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4089f68d-95", "ovs_interfaceid": "4089f68d-952e-4b5f-8578-0ce2a0afb9f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1006.878596] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1006.880410] env[65503]: INFO nova.compute.resource_tracker [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating resource usage from migration 76c6a98b-6534-43d9-bd70-39ee4d80a12e [ 1006.883565] env[65503]: DEBUG oslo_concurrency.lockutils [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Acquired lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.883682] env[65503]: DEBUG nova.network.neutron [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Refreshing network info cache for port 4089f68d-952e-4b5f-8578-0ce2a0afb9f1 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1006.885054] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc 
tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:f1:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0e00b2f1-c70f-4b21-86eb-810643cc1680', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4089f68d-952e-4b5f-8578-0ce2a0afb9f1', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1006.892965] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Creating folder: Project (0dbf0e9b08b741e88f971ec5f54dede8). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1006.893154] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b8fb4241-38c7-440f-95e3-6b0625fd737c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.897782] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8962e2c1-b93f-4664-aadb-791bb7637bea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.909530] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1006.909530] env[65503]: value = "task-4450483" [ 1006.909530] env[65503]: _type = "Task" [ 1006.909530] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.915162] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Created folder: Project (0dbf0e9b08b741e88f971ec5f54dede8) in parent group-v870190. [ 1006.915162] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Creating folder: Instances. Parent ref: group-v870440. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1006.915162] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7640e22-cc1d-4a59-a3fa-0570fba449d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.925664] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450483, 'name': CloneVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.930114] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Created folder: Instances in parent group-v870440. 
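The 'Creating folder ... Parent ref: group-v...' / 'Created folder ...' pairs above map to the synchronous Folder.CreateFolder call made through the vSphere session; unlike the *_Task methods there is no task to poll. A minimal sketch assuming the same kind of session object as in the earlier snippet, not the vm_util implementation itself:

    def create_folder(session, parent_folder_ref, name):
        # Folder.CreateFolder is invoked directly on the parent folder moref
        # (e.g. group-v870190 / group-v870440 above) and returns the moref
        # of the newly created child folder.
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_folder_ref, name=name)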
[ 1006.930374] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1006.930875] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1006.930875] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-148d99b9-2a33-4a50-b998-7fa9f76cdd6e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.953986] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1006.953986] env[65503]: value = "task-4450486" [ 1006.953986] env[65503]: _type = "Task" [ 1006.953986] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.965921] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450486, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.985062] env[65503]: DEBUG oslo_vmware.api [None req-01af3d24-6d65-4821-989c-0a0fd25372d9 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450481, 'name': SuspendVM_Task} progress is 62%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.994577] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450482, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.086602] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.087257] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30e43d7b-cc57-476c-838f-28cbc7b10cf3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.096114] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1007.096114] env[65503]: value = "task-4450487" [ 1007.096114] env[65503]: _type = "Task" [ 1007.096114] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.110117] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450487, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.118963] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c773027a-d3a3-47be-bfda-f9aeadb082b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.128836] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cb70ce-5c83-40fa-b096-06dff04e6b30 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.167139] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ec5c91-290b-4dd0-8822-aede02a4ef55 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.177294] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808f9625-ad12-4282-b9b3-dd5818e85965 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.193985] env[65503]: DEBUG nova.compute.provider_tree [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.255555] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52687571-5ff8-e911-4c88-5c9ac9d538c1, 'name': SearchDatastore_Task, 'duration_secs': 0.024316} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.255983] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.256387] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1007.256597] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.256833] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.257141] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1007.257609] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80b3c1ba-8b61-4be7-929d-61555cf7a5da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.269978] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1007.270211] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1007.270949] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3208448-2205-4896-b5bb-87a09253b6ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.278685] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1007.278685] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52229835-fc70-d16c-420a-f734c9064aaf" [ 1007.278685] env[65503]: _type = "Task" [ 1007.278685] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.289301] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52229835-fc70-d16c-420a-f734c9064aaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.398306] env[65503]: WARNING neutronclient.v2_0.client [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1007.398952] env[65503]: WARNING openstack [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1007.399445] env[65503]: WARNING openstack [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1007.423850] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450483, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.466098] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450486, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.480623] env[65503]: DEBUG oslo_vmware.api [None req-01af3d24-6d65-4821-989c-0a0fd25372d9 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450481, 'name': SuspendVM_Task, 'duration_secs': 1.351944} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.484172] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-01af3d24-6d65-4821-989c-0a0fd25372d9 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Suspended the VM {{(pid=65503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1007.484375] env[65503]: DEBUG nova.compute.manager [None req-01af3d24-6d65-4821-989c-0a0fd25372d9 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1007.485263] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c622a792-b16f-4696-8810-05c70b211b9c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.494607] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450482, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.507438] env[65503]: DEBUG nova.network.neutron [-] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1007.611968] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450487, 'name': PowerOffVM_Task, 'duration_secs': 0.258962} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.612316] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.613161] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b15edc-6cfc-4fe4-938b-4343f45475aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.650251] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91677dcb-c4b2-4e23-9a00-96163e59a707 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.695968] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.696992] env[65503]: DEBUG nova.scheduler.client.report [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1007.700532] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff52598c-4c1d-4443-a635-fb59250729f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.710552] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1007.710552] env[65503]: value = "task-4450488" [ 1007.710552] env[65503]: _type = "Task" [ 1007.710552] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.722868] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1007.723763] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1007.723763] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.792572] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52229835-fc70-d16c-420a-f734c9064aaf, 'name': SearchDatastore_Task, 'duration_secs': 0.015194} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.793347] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39e49cf4-b64d-4fdd-81d9-dcb004e8bb1a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.801359] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1007.801359] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]523ccc3f-473b-6356-d917-30f7bbfd18c7" [ 1007.801359] env[65503]: _type = "Task" [ 1007.801359] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.812173] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523ccc3f-473b-6356-d917-30f7bbfd18c7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.865938] env[65503]: WARNING openstack [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1007.866293] env[65503]: WARNING openstack [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1007.925373] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450483, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.966418] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450486, 'name': CreateVM_Task, 'duration_secs': 0.693027} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.966624] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1007.967120] env[65503]: WARNING neutronclient.v2_0.client [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1007.967494] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.967815] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.967938] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1007.968215] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05237752-6e58-4b1b-8304-2b50e442b2a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.976399] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1007.976399] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52846e89-d005-9aa9-ddc4-97d18d3e9a0c" [ 1007.976399] env[65503]: _type = "Task" [ 1007.976399] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.003081] env[65503]: DEBUG oslo_vmware.api [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450482, 'name': PowerOnVM_Task, 'duration_secs': 1.406756} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.006379] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52846e89-d005-9aa9-ddc4-97d18d3e9a0c, 'name': SearchDatastore_Task, 'duration_secs': 0.011341} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.006487] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1008.007097] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e52b0a51-08d0-422a-a695-8c70fc7fb8cc tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance '82415443-1884-4898-996e-828d23f67f23' progress to 100 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1008.011979] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.011979] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1008.011979] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.012790] env[65503]: INFO nova.compute.manager [-] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Took 1.59 seconds to deallocate network for instance. 
[ 1008.204996] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.357s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.205254] env[65503]: INFO nova.compute.manager [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Migrating [ 1008.212228] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.114s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.213713] env[65503]: INFO nova.compute.claims [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1008.314463] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523ccc3f-473b-6356-d917-30f7bbfd18c7, 'name': SearchDatastore_Task, 'duration_secs': 0.015236} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.315786] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.316166] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] fcdcabb9-f076-4fa9-ac30-3220eb6064da/fcdcabb9-f076-4fa9-ac30-3220eb6064da.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1008.316535] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.316816] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1008.317155] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be90bebe-bec2-4016-8a59-7d7bcbd791a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.320653] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e20e8232-d23c-4baa-9719-84fd297c401c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.336022] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1008.336022] env[65503]: value = "task-4450489" [ 1008.336022] env[65503]: _type = "Task" [ 1008.336022] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.341256] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1008.341369] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1008.342882] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c5ab576-7bdf-4d39-a513-5aaab5cc4e10 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.350973] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450489, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.358160] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1008.358160] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5208b226-292c-604f-f079-7838f5608721" [ 1008.358160] env[65503]: _type = "Task" [ 1008.358160] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.371790] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5208b226-292c-604f-f079-7838f5608721, 'name': SearchDatastore_Task, 'duration_secs': 0.010853} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.372677] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-234534d7-398e-4bac-803c-834c3bb231d1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.379925] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1008.379925] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cdf51b-042e-d257-5947-ecb5c911edb6" [ 1008.379925] env[65503]: _type = "Task" [ 1008.379925] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.391241] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cdf51b-042e-d257-5947-ecb5c911edb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.429064] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450483, 'name': CloneVM_Task, 'duration_secs': 1.375809} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.429513] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Created linked-clone VM from snapshot [ 1008.430293] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6ccf7e-531e-4e95-adbb-56665377e710 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.440312] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Uploading image ba6b58dd-a86f-4241-b14b-e9590b647969 {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1008.449731] env[65503]: DEBUG nova.compute.manager [req-d25c4cf5-44e7-488b-847c-f122fa4e89e9 req-c6f8fa60-9199-43b9-b5b5-f06add21f1d3 service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Received event network-vif-deleted-a52973b6-14a8-480e-8ad4-92719252801c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1008.476115] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1008.476115] env[65503]: value = "vm-870442" [ 1008.476115] env[65503]: _type = "VirtualMachine" [ 1008.476115] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1008.476478] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ea065c3e-d7dd-4275-a61e-a5d83a7bd1d0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.493369] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lease: (returnval){ [ 1008.493369] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f73399-356c-cf05-947d-6da17a2cfee9" [ 1008.493369] env[65503]: _type = "HttpNfcLease" [ 1008.493369] env[65503]: } obtained for exporting VM: (result){ [ 1008.493369] env[65503]: value = "vm-870442" [ 1008.493369] env[65503]: _type = "VirtualMachine" [ 1008.493369] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1008.493369] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the lease: (returnval){ [ 1008.493369] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f73399-356c-cf05-947d-6da17a2cfee9" [ 1008.493369] env[65503]: _type = "HttpNfcLease" [ 1008.493369] env[65503]: } to be ready. 
{{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1008.503086] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1008.503086] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f73399-356c-cf05-947d-6da17a2cfee9" [ 1008.503086] env[65503]: _type = "HttpNfcLease" [ 1008.503086] env[65503]: } is initializing. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1008.523932] env[65503]: DEBUG oslo_concurrency.lockutils [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.556584] env[65503]: WARNING neutronclient.v2_0.client [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1008.557394] env[65503]: WARNING openstack [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1008.557820] env[65503]: WARNING openstack [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1008.730084] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.730084] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.730487] env[65503]: DEBUG nova.network.neutron [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1008.789540] env[65503]: DEBUG nova.network.neutron [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Updated VIF entry in instance network info cache for port 4089f68d-952e-4b5f-8578-0ce2a0afb9f1. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1008.789540] env[65503]: DEBUG nova.network.neutron [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Updating instance_info_cache with network_info: [{"id": "4089f68d-952e-4b5f-8578-0ce2a0afb9f1", "address": "fa:16:3e:d8:f1:dd", "network": {"id": "e65ab27f-25ec-415d-b0f1-4cee1c4a6a2e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1962350376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dbf0e9b08b741e88f971ec5f54dede8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4089f68d-95", "ovs_interfaceid": "4089f68d-952e-4b5f-8578-0ce2a0afb9f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1008.853744] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450489, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.891661] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cdf51b-042e-d257-5947-ecb5c911edb6, 'name': SearchDatastore_Task, 'duration_secs': 0.011155} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.892650] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.892650] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] e37758cc-7287-4271-ad47-d711201d0add/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. 
{{(pid=65503) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1008.892650] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.892839] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1008.892986] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-355a3733-d827-4b30-a1d0-7ee6ec2f8c6f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.895492] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73f78cf7-659e-48e8-8bef-759aa499b268 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.903526] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1008.903526] env[65503]: value = "task-4450491" [ 1008.903526] env[65503]: _type = "Task" [ 1008.903526] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.908034] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1008.908034] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1008.911397] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38d91dbc-3a8a-4f2b-8c88-4f9aa7ad5abd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.915052] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450491, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.918901] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1008.918901] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52821818-2754-639a-ba83-f0bc7da64011" [ 1008.918901] env[65503]: _type = "Task" [ 1008.918901] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.927512] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52821818-2754-639a-ba83-f0bc7da64011, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.003686] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1009.003686] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f73399-356c-cf05-947d-6da17a2cfee9" [ 1009.003686] env[65503]: _type = "HttpNfcLease" [ 1009.003686] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1009.004083] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1009.004083] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f73399-356c-cf05-947d-6da17a2cfee9" [ 1009.004083] env[65503]: _type = "HttpNfcLease" [ 1009.004083] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1009.004935] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861f0bfe-388e-4f3e-8c1d-840078d127b7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.014332] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a05b22-a5b7-d744-41d5-3483cdbfaa49/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1009.014590] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a05b22-a5b7-d744-41d5-3483cdbfaa49/disk-0.vmdk for reading. {{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1009.233677] env[65503]: WARNING neutronclient.v2_0.client [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1009.234523] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1009.235408] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1009.246107] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f93c2555-5aec-4070-b0ec-14602b03ef4b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.296901] env[65503]: DEBUG oslo_concurrency.lockutils [req-ad6da787-3b36-43c9-9f15-d16534c3dafb req-6810d256-c609-4fc6-9b64-c52394272788 service nova] Releasing lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.352231] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450489, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526328} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.353029] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] fcdcabb9-f076-4fa9-ac30-3220eb6064da/fcdcabb9-f076-4fa9-ac30-3220eb6064da.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1009.353029] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1009.353029] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c14697f-b887-4af3-8b1a-6c9a80153a68 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.367084] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1009.367084] env[65503]: value = "task-4450492" [ 1009.367084] env[65503]: _type = "Task" [ 1009.367084] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.378146] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450492, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.420580] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450491, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466645} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.424221] env[65503]: INFO nova.virt.vmwareapi.ds_util [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] e37758cc-7287-4271-ad47-d711201d0add/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. [ 1009.425225] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef7335a-9a8a-4cef-92ca-3765d91e27e2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.436390] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52821818-2754-639a-ba83-f0bc7da64011, 'name': SearchDatastore_Task, 'duration_secs': 0.021302} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.461682] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] e37758cc-7287-4271-ad47-d711201d0add/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1009.466028] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2041a8d3-50ed-48d0-9d9b-dd025977d4b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.468610] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-181565d8-f0d9-43c4-b342-9a8c0b2d2a03 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.492385] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1009.492385] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520fc803-8a37-ba85-448c-69701197302b" [ 1009.492385] env[65503]: _type = "Task" [ 1009.492385] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.498389] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1009.498389] env[65503]: value = "task-4450493" [ 1009.498389] env[65503]: _type = "Task" [ 1009.498389] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.513122] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520fc803-8a37-ba85-448c-69701197302b, 'name': SearchDatastore_Task, 'duration_secs': 0.013832} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.514067] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.514237] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 96d8f433-9b86-422f-88ef-99836fb21f30/96d8f433-9b86-422f-88ef-99836fb21f30.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1009.514715] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43648a13-213f-4b64-a161-f0563eae74c2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.526102] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450493, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.535430] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1009.535430] env[65503]: value = "task-4450494" [ 1009.535430] env[65503]: _type = "Task" [ 1009.535430] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.549388] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450494, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.596061] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be32097d-a9d7-46ce-80b8-007806ce4b3a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.607931] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7b2220-6497-4771-ad5f-c4bd90145881 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.642504] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.642871] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.643108] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.644009] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.644009] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.646792] env[65503]: INFO nova.compute.manager [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Terminating instance [ 1009.649073] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801d4343-7ef6-4ab3-8c60-4d3d32274236 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.660625] env[65503]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736fbf53-c3d3-4e8f-89fc-1c80995265a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.680440] env[65503]: DEBUG nova.compute.provider_tree [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.841390] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1009.842230] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1009.886939] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450492, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06999} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.886939] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1009.886939] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c13899f-ef08-4dd0-b546-0ffe54a2aac8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.917012] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] fcdcabb9-f076-4fa9-ac30-3220eb6064da/fcdcabb9-f076-4fa9-ac30-3220eb6064da.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1009.917417] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a83055ef-6e1c-4693-8cc4-006ee1ab36f7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.942107] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1009.942107] env[65503]: value = "task-4450495" [ 1009.942107] env[65503]: _type = "Task" [ 1009.942107] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.951817] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450495, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.017247] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450493, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.048596] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450494, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.157320] env[65503]: DEBUG nova.compute.manager [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1010.157418] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1010.158972] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df937ecd-3d5f-4eff-af42-1584018b485e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.170582] env[65503]: WARNING neutronclient.v2_0.client [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1010.171366] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1010.171841] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1010.185769] env[65503]: DEBUG nova.scheduler.client.report [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1010.190161] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1010.190783] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4e5bdc0-fb98-48fe-9b83-b2f171d7e206 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.278500] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 
tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1010.278805] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1010.279034] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleting the datastore file [datastore1] 9e1b174f-c7cb-45f1-b7c0-b980f32823c8 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.279375] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7651b227-f349-4d30-82b2-49278b249a7b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.288336] env[65503]: DEBUG oslo_vmware.api [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1010.288336] env[65503]: value = "task-4450497" [ 1010.288336] env[65503]: _type = "Task" [ 1010.288336] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.300319] env[65503]: DEBUG oslo_vmware.api [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450497, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.323470] env[65503]: DEBUG nova.network.neutron [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance_info_cache with network_info: [{"id": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "address": "fa:16:3e:c9:1b:64", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2945b8a-83", "ovs_interfaceid": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1010.455108] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450495, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.514577] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450493, 'name': ReconfigVM_Task, 'duration_secs': 0.711972} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.514577] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Reconfigured VM instance instance-00000054 to attach disk [datastore1] e37758cc-7287-4271-ad47-d711201d0add/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1010.515215] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36beb9c0-b6aa-4888-a9cf-4ba96f683b41 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.556602] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d759de1d-8e79-46e6-829a-def1db09e8f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.576973] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450494, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.715375} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.578835] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 96d8f433-9b86-422f-88ef-99836fb21f30/96d8f433-9b86-422f-88ef-99836fb21f30.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1010.579406] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1010.579986] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1010.579986] env[65503]: value = "task-4450498" [ 1010.579986] env[65503]: _type = "Task" [ 1010.579986] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.580473] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-819043ee-a809-4b24-834a-cb12162ba196 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.598281] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450498, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.602800] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1010.602800] env[65503]: value = "task-4450499" [ 1010.602800] env[65503]: _type = "Task" [ 1010.602800] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.622746] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450499, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.691830] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.480s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.692724] env[65503]: DEBUG nova.compute.manager [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1010.695739] env[65503]: DEBUG oslo_concurrency.lockutils [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.172s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.696123] env[65503]: DEBUG nova.objects.instance [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'resources' on Instance uuid 2a1587cd-8b47-439f-948c-d58a5dc8220e {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.799854] env[65503]: DEBUG oslo_vmware.api [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450497, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.357072} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.800186] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1010.800393] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1010.800610] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1010.800912] env[65503]: INFO nova.compute.manager [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1010.801075] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1010.801430] env[65503]: DEBUG nova.compute.manager [-] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1010.801430] env[65503]: DEBUG nova.network.neutron [-] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1010.801777] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1010.802992] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1010.803461] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1010.827753] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.859099] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1010.955337] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450495, 'name': ReconfigVM_Task, 'duration_secs': 0.752482} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.955993] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Reconfigured VM instance instance-00000058 to attach disk [datastore1] fcdcabb9-f076-4fa9-ac30-3220eb6064da/fcdcabb9-f076-4fa9-ac30-3220eb6064da.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1010.959463] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-583048f7-c477-4def-aec8-72dc6cda52d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.973322] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1010.973322] env[65503]: value = "task-4450500" [ 1010.973322] env[65503]: _type = "Task" [ 1010.973322] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.987939] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450500, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.097241] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450498, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.118781] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450499, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087702} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.119910] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1011.120889] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5183b885-99ca-47b0-9f11-393fb1fe4035 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.150460] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 96d8f433-9b86-422f-88ef-99836fb21f30/96d8f433-9b86-422f-88ef-99836fb21f30.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.152351] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b86d20c-2664-47d5-a491-35f37e04731b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.179081] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1011.179081] env[65503]: value = "task-4450501" [ 1011.179081] env[65503]: _type = "Task" [ 1011.179081] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.189659] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450501, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.200399] env[65503]: DEBUG nova.compute.utils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1011.208311] env[65503]: DEBUG nova.compute.manager [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1011.208429] env[65503]: DEBUG nova.network.neutron [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1011.208706] env[65503]: WARNING neutronclient.v2_0.client [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1011.208999] env[65503]: WARNING neutronclient.v2_0.client [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1011.209756] env[65503]: WARNING openstack [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1011.210181] env[65503]: WARNING openstack [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1011.317063] env[65503]: DEBUG nova.policy [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f06102af25d4279a0104a75b62014fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '592efb180976432cbcecb9ad421e1bd1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1011.469916] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b05d506-a906-4c01-9052-53d583ddff62 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.482610] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2dbf09b-b49d-4af1-9644-19ad35656c84 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.489341] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 
tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450500, 'name': Rename_Task, 'duration_secs': 0.227767} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.490082] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.490752] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edc711a4-3888-4415-93ab-9d12ec875dfc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.521937] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c4905f-3212-40de-a01a-ffacaab879d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.525482] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1011.525482] env[65503]: value = "task-4450502" [ 1011.525482] env[65503]: _type = "Task" [ 1011.525482] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.533515] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787e1ab3-71c9-4979-a5f0-8372301968e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.542403] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450502, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.556355] env[65503]: DEBUG nova.compute.provider_tree [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.596389] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450498, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.674034] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "82415443-1884-4898-996e-828d23f67f23" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.674542] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "82415443-1884-4898-996e-828d23f67f23" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.675091] env[65503]: DEBUG nova.compute.manager [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Going to confirm migration 4 {{(pid=65503) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 1011.680875] env[65503]: DEBUG nova.compute.manager [req-267aa11f-9743-4f94-a20f-2edd5d1a3a20 req-dbe578f1-f77f-4f00-9804-a1527047f808 service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Received event network-vif-deleted-cf5f2d40-016e-4723-bc1b-d56cbe868b91 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1011.680875] env[65503]: INFO nova.compute.manager [req-267aa11f-9743-4f94-a20f-2edd5d1a3a20 req-dbe578f1-f77f-4f00-9804-a1527047f808 service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Neutron deleted interface cf5f2d40-016e-4723-bc1b-d56cbe868b91; detaching it from the instance and deleting it from the info cache [ 1011.680875] env[65503]: DEBUG nova.network.neutron [req-267aa11f-9743-4f94-a20f-2edd5d1a3a20 req-dbe578f1-f77f-4f00-9804-a1527047f808 service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1011.694081] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450501, 'name': ReconfigVM_Task, 'duration_secs': 0.415922} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.694407] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 96d8f433-9b86-422f-88ef-99836fb21f30/96d8f433-9b86-422f-88ef-99836fb21f30.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.695122] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b46ed0ee-d223-4c1e-8ff3-4d771dfb146a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.703935] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1011.703935] env[65503]: value = "task-4450503" [ 1011.703935] env[65503]: _type = "Task" [ 1011.703935] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.710222] env[65503]: DEBUG nova.compute.manager [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1011.723492] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450503, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.767878] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.768210] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.769781] env[65503]: DEBUG nova.network.neutron [-] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1011.777285] env[65503]: DEBUG nova.network.neutron [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Successfully created port: 96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1012.038756] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450502, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.064271] env[65503]: DEBUG nova.scheduler.client.report [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1012.098901] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450498, 'name': ReconfigVM_Task, 'duration_secs': 1.443789} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.099381] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1012.099789] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2dfcc0bb-460f-4887-9487-4f3eebb19b32 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.109035] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1012.109035] env[65503]: value = "task-4450504" [ 1012.109035] env[65503]: _type = "Task" [ 1012.109035] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.120870] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450504, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.186086] env[65503]: WARNING neutronclient.v2_0.client [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1012.188118] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36a87dd7-0f68-4674-8649-872f94e7868c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.198740] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef3e24b-c745-494a-b0d0-7f93e47b447d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.224744] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450503, 'name': Rename_Task, 'duration_secs': 0.216768} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.225059] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1012.225325] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea3c5884-2175-4673-81a2-c501276f691e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.238653] env[65503]: DEBUG nova.compute.manager [req-267aa11f-9743-4f94-a20f-2edd5d1a3a20 req-dbe578f1-f77f-4f00-9804-a1527047f808 service nova] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Detach interface failed, port_id=cf5f2d40-016e-4723-bc1b-d56cbe868b91, reason: Instance 9e1b174f-c7cb-45f1-b7c0-b980f32823c8 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1012.240738] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1012.240738] env[65503]: value = "task-4450505" [ 1012.240738] env[65503]: _type = "Task" [ 1012.240738] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.250882] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450505, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.272371] env[65503]: DEBUG nova.compute.manager [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1012.276582] env[65503]: INFO nova.compute.manager [-] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Took 1.47 seconds to deallocate network for instance. [ 1012.346168] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1051f2d2-e847-4252-91c8-a1f6df8429f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.356022] env[65503]: WARNING neutronclient.v2_0.client [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1012.356022] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.356022] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.356022] env[65503]: DEBUG nova.network.neutron [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1012.356022] env[65503]: DEBUG nova.objects.instance [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lazy-loading 'info_cache' on Instance uuid 82415443-1884-4898-996e-828d23f67f23 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.372159] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance '2effe3e4-ea22-4d9f-8f5c-38ee661611e3' progress to 0 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1012.538701] env[65503]: DEBUG oslo_vmware.api [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450502, 'name': PowerOnVM_Task, 'duration_secs': 0.64317} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.539039] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.539327] env[65503]: INFO nova.compute.manager [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Took 10.50 seconds to spawn the instance on the hypervisor. 
[ 1012.539663] env[65503]: DEBUG nova.compute.manager [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1012.540398] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bff20d3-9709-4c38-8150-43d8dce100a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.568286] env[65503]: DEBUG oslo_concurrency.lockutils [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.872s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.595772] env[65503]: INFO nova.scheduler.client.report [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Deleted allocations for instance 2a1587cd-8b47-439f-948c-d58a5dc8220e [ 1012.623170] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450504, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.722333] env[65503]: DEBUG nova.compute.manager [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1012.758231] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450505, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.759783] env[65503]: DEBUG nova.virt.hardware [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1012.760362] env[65503]: DEBUG nova.virt.hardware [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1012.760759] env[65503]: DEBUG nova.virt.hardware [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1012.761143] env[65503]: DEBUG nova.virt.hardware [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1012.761486] env[65503]: DEBUG nova.virt.hardware [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1012.761803] env[65503]: DEBUG nova.virt.hardware [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1012.762246] env[65503]: DEBUG nova.virt.hardware [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1012.762618] env[65503]: DEBUG nova.virt.hardware [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 1012.763042] env[65503]: DEBUG nova.virt.hardware [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1012.764256] env[65503]: DEBUG nova.virt.hardware [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1012.764613] env[65503]: DEBUG nova.virt.hardware [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1012.766058] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf46fcfd-0903-495b-8d22-ceb04f7b63a3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.776807] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2f30c4-55bf-4c6d-8be6-fcd130d0f04e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.790038] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.790038] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.790038] env[65503]: DEBUG nova.objects.instance [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lazy-loading 'resources' on Instance uuid 9e1b174f-c7cb-45f1-b7c0-b980f32823c8 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.811298] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.880709] env[65503]: WARNING neutronclient.v2_0.client [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be 
removed in a future release. [ 1012.881431] env[65503]: WARNING openstack [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1012.881820] env[65503]: WARNING openstack [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1012.891469] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.891983] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac08d172-ce40-4587-bcfe-55cc4cfa4fb1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.901480] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1012.901480] env[65503]: value = "task-4450506" [ 1012.901480] env[65503]: _type = "Task" [ 1012.901480] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.912443] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1012.912687] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance '2effe3e4-ea22-4d9f-8f5c-38ee661611e3' progress to 17 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1013.064982] env[65503]: INFO nova.compute.manager [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Took 15.46 seconds to build instance. 
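The CPU-topology trace above (Flavor limits 0:0:0, Image limits 0:0:0, one candidate VirtCPUTopology(cores=1,sockets=1,threads=1)) follows from a 1-vCPU flavor with no explicit constraints: the only split of one vCPU under the default 65536 ceilings is 1x1x1. Below is a minimal sketch of that enumeration, assuming a namedtuple stand-in for the real VirtCPUTopology object and a hypothetical helper name; this is not Nova's _get_possible_cpu_topologies implementation.

```python
from collections import namedtuple

# Stand-in for nova.objects.VirtCPUTopology (assumption for this sketch).
VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, limit=VirtCPUTopology(65536, 65536, 65536)):
    """Yield every sockets*cores*threads split that multiplies out to vcpus."""
    for sockets in range(1, min(vcpus, limit.sockets) + 1):
        for cores in range(1, min(vcpus, limit.cores) + 1):
            for threads in range(1, min(vcpus, limit.threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield VirtCPUTopology(sockets, cores, threads)

# For the m1.nano flavor in the log (vcpus=1) there is exactly one candidate,
# matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
print(list(possible_topologies(1)))
```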
[ 1013.104074] env[65503]: DEBUG oslo_concurrency.lockutils [None req-240233f2-71d4-4178-b67f-40c7179c789a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.932s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.105194] env[65503]: DEBUG oslo_concurrency.lockutils [req-c01d7117-6be0-4346-b414-8cb1d92b8566 req-3ca17e2c-304e-4dd5-a52f-9f519e4d0d8a service nova] Acquired lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.106493] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306737dd-0380-4bb7-bd36-a73196d51747 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.116520] env[65503]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1013.116520] env[65503]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=65503) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1013.116819] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c999702-e53b-409b-bec2-b995e615cd16 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.122240] env[65503]: DEBUG oslo_vmware.api [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450504, 'name': PowerOnVM_Task, 'duration_secs': 0.595604} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.123051] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1013.127157] env[65503]: DEBUG nova.compute.manager [None req-6d057b30-8dfb-4ae6-83dd-a039283fbc1c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1013.128167] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15937c81-a0c5-4501-a2c9-6d75e04042e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.137675] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fbee278-d69c-44dc-be04-565bfa9256c7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.179859] env[65503]: ERROR root [req-c01d7117-6be0-4346-b414-8cb1d92b8566 req-3ca17e2c-304e-4dd5-a52f-9f519e4d0d8a service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-870404' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 479, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-870404' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-870404' has already been deleted or has not been completely created'\nFaults: 
[ManagedObjectNotFound]\nDetails: {'obj': 'vm-870404'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-870404' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-870404' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-870404'}\n"]: nova.exception.InstanceNotFound: Instance 2a1587cd-8b47-439f-948c-d58a5dc8220e could not be found. [ 1013.180282] env[65503]: DEBUG oslo_concurrency.lockutils [req-c01d7117-6be0-4346-b414-8cb1d92b8566 req-3ca17e2c-304e-4dd5-a52f-9f519e4d0d8a service nova] Releasing lock "2a1587cd-8b47-439f-948c-d58a5dc8220e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.180410] env[65503]: DEBUG nova.compute.manager [req-c01d7117-6be0-4346-b414-8cb1d92b8566 req-3ca17e2c-304e-4dd5-a52f-9f519e4d0d8a service nova] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Detach interface failed, port_id=995d778e-470e-4cfa-bb9e-b83d9780f51a, reason: Instance 2a1587cd-8b47-439f-948c-d58a5dc8220e could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1013.255566] env[65503]: DEBUG oslo_vmware.api [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450505, 'name': PowerOnVM_Task, 'duration_secs': 0.965558} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.255868] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1013.256133] env[65503]: INFO nova.compute.manager [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Took 9.30 seconds to spawn the instance on the hypervisor. [ 1013.256318] env[65503]: DEBUG nova.compute.manager [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1013.257137] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28dc899-777d-478d-addc-b6d431f33136 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.421341] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1013.421649] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1013.421910] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1013.422145] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1013.422391] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1013.422543] env[65503]: DEBUG 
nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1013.422746] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1013.422952] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1013.423115] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1013.423273] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1013.423433] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1013.429151] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10e6e73e-5067-43d4-a7f8-5ed17f037187 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.447741] env[65503]: DEBUG nova.network.neutron [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Successfully updated port: 96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1013.457682] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1013.457682] env[65503]: value = "task-4450507" [ 1013.457682] env[65503]: _type = "Task" [ 1013.457682] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.472556] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450507, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.516085] env[65503]: DEBUG nova.compute.manager [req-866fbbad-6516-48b2-b760-2c161beb54d6 req-52dc7d90-9d61-4fe6-bd0a-d3ada74b6d71 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Received event network-vif-plugged-96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1013.516289] env[65503]: DEBUG oslo_concurrency.lockutils [req-866fbbad-6516-48b2-b760-2c161beb54d6 req-52dc7d90-9d61-4fe6-bd0a-d3ada74b6d71 service nova] Acquiring lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.516549] env[65503]: DEBUG oslo_concurrency.lockutils [req-866fbbad-6516-48b2-b760-2c161beb54d6 req-52dc7d90-9d61-4fe6-bd0a-d3ada74b6d71 service nova] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.516643] env[65503]: DEBUG oslo_concurrency.lockutils [req-866fbbad-6516-48b2-b760-2c161beb54d6 req-52dc7d90-9d61-4fe6-bd0a-d3ada74b6d71 service nova] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.516805] env[65503]: DEBUG nova.compute.manager [req-866fbbad-6516-48b2-b760-2c161beb54d6 req-52dc7d90-9d61-4fe6-bd0a-d3ada74b6d71 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] No waiting events found dispatching network-vif-plugged-96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1013.516962] env[65503]: WARNING nova.compute.manager [req-866fbbad-6516-48b2-b760-2c161beb54d6 req-52dc7d90-9d61-4fe6-bd0a-d3ada74b6d71 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Received unexpected event network-vif-plugged-96370c0c-da2e-4229-82a1-a24b799e6402 for instance with vm_state building and task_state spawning. 
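The "ERROR root ... Original exception being dropped" traceback above records a three-step fault translation: vCenter answers the RetrievePropertiesEx call with a ManagedObjectNotFound SOAP fault for vm-870404, oslo.vmware surfaces it as ManagedObjectNotFoundException, and the compute layer discards it in favour of nova.exception.InstanceNotFound for the instance UUID. The following is a hedged sketch of that handling, assuming an already-established oslo.vmware session and a hypothetical helper name; it is not Nova's actual code.

```python
from oslo_vmware import exceptions as vexc
from oslo_vmware import vim_util

from nova import exception


def get_power_state(session, instance, vm_ref):
    """Look up runtime.powerState; translate a stale moref into InstanceNotFound."""
    try:
        # Mirrors the invoke_api -> vim_util.get_object_property call chain
        # shown in the traceback above.
        return session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, vm_ref, 'runtime.powerState')
    except vexc.ManagedObjectNotFoundException:
        # The backing VM (e.g. vm-870404) was already deleted in vCenter,
        # so report the Nova instance itself as missing.
        raise exception.InstanceNotFound(instance_id=instance.uuid)
```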
[ 1013.524222] env[65503]: WARNING openstack [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1013.524306] env[65503]: WARNING openstack [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1013.567924] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51ba30d5-2603-456a-865e-37b204983b68 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.973s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.577562] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0d2bfd-fc74-415f-ab2a-f142baf0fd47 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.589151] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7beb951-2656-47f1-b725-fac3e95d6c84 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.631548] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae3b216-afd2-48a0-b96f-faf5ba47cc53 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.640301] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a33c1b-599a-4079-b35c-80934ddd6393 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.659547] env[65503]: DEBUG nova.compute.provider_tree [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.711095] env[65503]: WARNING neutronclient.v2_0.client [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
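The recurring "Disabling service 'block-storage'/'key-manager'" warnings come from openstacksdk reading nova.conf and asking for a valid_interfaces option that was never registered in the [cinder] and [barbican] groups; oslo.config raises NoSuchOptError for any unregistered option, and the SDK reacts by disabling that service's config. A minimal reproduction of the underlying oslo.config behaviour follows (group and option names copied from the warning text; everything else is an illustrative assumption, not the SDK's code path).

```python
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))
conf([])  # initialise with no CLI args or config files

try:
    conf.cinder.valid_interfaces  # option was never registered in this group
except cfg.NoSuchOptError as err:
    # Prints roughly "no such option valid_interfaces in group [cinder]",
    # the same text embedded in the warnings above.
    print(err)
```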
[ 1013.711918] env[65503]: WARNING openstack [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1013.712314] env[65503]: WARNING openstack [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1013.782017] env[65503]: INFO nova.compute.manager [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Took 15.82 seconds to build instance. [ 1013.878683] env[65503]: DEBUG nova.network.neutron [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance_info_cache with network_info: [{"id": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "address": "fa:16:3e:6c:9a:41", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e6ffe6-e1", "ovs_interfaceid": "d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1013.952361] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.952361] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.952361] env[65503]: DEBUG nova.network.neutron [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1013.968429] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450507, 'name': ReconfigVM_Task, 'duration_secs': 0.302306} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.968778] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance '2effe3e4-ea22-4d9f-8f5c-38ee661611e3' progress to 33 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1014.164021] env[65503]: DEBUG nova.scheduler.client.report [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1014.284598] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a04d5f12-216b-4507-a9d4-921b30eac4cc tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "96d8f433-9b86-422f-88ef-99836fb21f30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.328s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.383856] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "refresh_cache-82415443-1884-4898-996e-828d23f67f23" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.383856] env[65503]: DEBUG nova.objects.instance [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lazy-loading 'migration_context' on Instance uuid 82415443-1884-4898-996e-828d23f67f23 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.458500] env[65503]: WARNING openstack [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for 
project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1014.458500] env[65503]: WARNING openstack [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1014.475952] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1014.476634] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1014.476634] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1014.476634] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1014.476801] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1014.476842] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1014.477475] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:577}} [ 1014.477475] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1014.477475] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1014.477721] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1014.477838] env[65503]: DEBUG nova.virt.hardware [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1014.485634] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Reconfiguring VM instance instance-00000045 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1014.486295] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aeda4a69-2594-4cd1-9825-8cf0c6b39a54 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.510630] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1014.510630] env[65503]: value = "task-4450508" [ 1014.510630] env[65503]: _type = "Task" [ 1014.510630] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.527611] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450508, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.599978] env[65503]: DEBUG nova.network.neutron [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1014.672494] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.883s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.679500] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.864s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.682630] env[65503]: INFO nova.compute.claims [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1014.716400] env[65503]: INFO nova.scheduler.client.report [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted allocations for instance 9e1b174f-c7cb-45f1-b7c0-b980f32823c8 [ 1014.889366] env[65503]: DEBUG nova.objects.base [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Object Instance<82415443-1884-4898-996e-828d23f67f23> lazy-loaded attributes: info_cache,migration_context {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1014.889366] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1566bed5-17e1-48e5-b1ee-7f8589d11445 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.916960] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c22dba2-3447-46ce-938b-eb8d25be5b87 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.927414] env[65503]: DEBUG oslo_vmware.api [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1014.927414] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c197f9-f4b6-e019-e2d5-b217b21e8c9a" [ 1014.927414] env[65503]: _type = "Task" [ 1014.927414] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.939777] env[65503]: DEBUG oslo_vmware.api [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c197f9-f4b6-e019-e2d5-b217b21e8c9a, 'name': SearchDatastore_Task, 'duration_secs': 0.011198} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.943022] env[65503]: WARNING openstack [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1014.943022] env[65503]: WARNING openstack [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1014.950621] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.023295] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450508, 'name': ReconfigVM_Task, 'duration_secs': 0.265822} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.024609] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Reconfigured VM instance instance-00000045 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1015.025810] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd88c70-8237-46fe-a96b-e3d61af85597 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.062543] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 2effe3e4-ea22-4d9f-8f5c-38ee661611e3/2effe3e4-ea22-4d9f-8f5c-38ee661611e3.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.063798] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-337e825d-87b5-45c5-90b3-c4be32fa5923 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.088322] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting 
for the task: (returnval){ [ 1015.088322] env[65503]: value = "task-4450509" [ 1015.088322] env[65503]: _type = "Task" [ 1015.088322] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.102063] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450509, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.186299] env[65503]: WARNING neutronclient.v2_0.client [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1015.187034] env[65503]: WARNING openstack [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1015.187454] env[65503]: WARNING openstack [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1015.229450] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6441df42-4bbf-4888-a52e-c322a77783ee tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "9e1b174f-c7cb-45f1-b7c0-b980f32823c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.583s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.446464] env[65503]: DEBUG nova.network.neutron [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updating instance_info_cache with network_info: [{"id": "96370c0c-da2e-4229-82a1-a24b799e6402", "address": "fa:16:3e:9a:2f:df", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": 
"nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96370c0c-da", "ovs_interfaceid": "96370c0c-da2e-4229-82a1-a24b799e6402", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1015.500861] env[65503]: INFO nova.compute.manager [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Unrescuing [ 1015.501167] env[65503]: DEBUG oslo_concurrency.lockutils [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.501419] env[65503]: DEBUG oslo_concurrency.lockutils [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.501673] env[65503]: DEBUG nova.network.neutron [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1015.613159] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450509, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.953021] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.953021] env[65503]: DEBUG nova.compute.manager [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Instance network_info: |[{"id": "96370c0c-da2e-4229-82a1-a24b799e6402", "address": "fa:16:3e:9a:2f:df", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96370c0c-da", "ovs_interfaceid": "96370c0c-da2e-4229-82a1-a24b799e6402", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1015.953021] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:2f:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96370c0c-da2e-4229-82a1-a24b799e6402', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1015.960331] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating folder: Project (592efb180976432cbcecb9ad421e1bd1). Parent ref: group-v870190. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1015.963960] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af241f13-f2a1-41dd-bc8d-1d879ac830b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.979165] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Created folder: Project (592efb180976432cbcecb9ad421e1bd1) in parent group-v870190. [ 1015.979482] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating folder: Instances. Parent ref: group-v870444. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1015.980265] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18858471-ce24-4ed3-8bcb-b251e6956ab4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.995728] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119edb4d-4b78-476a-97ef-c4338b300b11 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.002788] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Created folder: Instances in parent group-v870444. [ 1016.002788] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1016.003154] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1016.003412] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b21f04e-e18d-46d3-a5dd-6b2896c88e0c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.026381] env[65503]: WARNING neutronclient.v2_0.client [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1016.027205] env[65503]: WARNING openstack [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1016.027525] env[65503]: WARNING openstack [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1016.038589] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c124a503-1344-4fb0-ae82-6f43d8e4a4d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.045754] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.045754] env[65503]: value = "task-4450512" [ 1016.045754] env[65503]: _type = "Task" [ 1016.045754] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.088916] env[65503]: DEBUG nova.compute.manager [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Received event network-changed-96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1016.088916] env[65503]: DEBUG nova.compute.manager [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Refreshing instance network info cache due to event network-changed-96370c0c-da2e-4229-82a1-a24b799e6402. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1016.088916] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Acquiring lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.088916] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Acquired lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.089161] env[65503]: DEBUG nova.network.neutron [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Refreshing network info cache for port 96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1016.092098] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4084150-4568-4577-82aa-bdc3cf011867 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.102896] env[65503]: DEBUG nova.compute.manager [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Received event network-changed-4089f68d-952e-4b5f-8578-0ce2a0afb9f1 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1016.103149] env[65503]: DEBUG nova.compute.manager [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Refreshing instance network info cache due to event network-changed-4089f68d-952e-4b5f-8578-0ce2a0afb9f1. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1016.103393] env[65503]: DEBUG oslo_concurrency.lockutils [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] Acquiring lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.103540] env[65503]: DEBUG oslo_concurrency.lockutils [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] Acquired lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.103702] env[65503]: DEBUG nova.network.neutron [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Refreshing network info cache for port 4089f68d-952e-4b5f-8578-0ce2a0afb9f1 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1016.105542] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450512, 'name': CreateVM_Task} progress is 15%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.121462] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450509, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.123093] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68bd51f5-8734-494a-be1c-ccf424492e4b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.144737] env[65503]: DEBUG nova.compute.provider_tree [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.200710] env[65503]: WARNING openstack [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1016.201184] env[65503]: WARNING openstack [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1016.329443] env[65503]: WARNING neutronclient.v2_0.client [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1016.330800] env[65503]: WARNING openstack [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1016.330800] env[65503]: WARNING openstack [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1016.511157] env[65503]: DEBUG nova.network.neutron [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Updating instance_info_cache with network_info: [{"id": "21904121-6624-489f-b851-76b0dfc15641", "address": "fa:16:3e:cd:01:68", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21904121-66", "ovs_interfaceid": "21904121-6624-489f-b851-76b0dfc15641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1016.558364] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450512, 'name': CreateVM_Task, 'duration_secs': 0.42804} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.559672] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1016.559672] env[65503]: WARNING neutronclient.v2_0.client [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1016.559672] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.560090] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.560471] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1016.561926] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b651a285-4ca5-4f62-80c7-5359a844f2aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.568623] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1016.568623] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5290da01-8a3c-72b2-13c8-714fda138836" [ 1016.568623] env[65503]: _type = "Task" [ 1016.568623] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.583637] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5290da01-8a3c-72b2-13c8-714fda138836, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.600778] env[65503]: WARNING neutronclient.v2_0.client [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1016.601517] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1016.601862] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1016.612147] env[65503]: WARNING neutronclient.v2_0.client [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1016.612567] env[65503]: WARNING openstack [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1016.612945] env[65503]: WARNING openstack [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1016.633051] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450509, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.649116] env[65503]: DEBUG nova.scheduler.client.report [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1016.752312] env[65503]: DEBUG oslo_concurrency.lockutils [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.752621] env[65503]: DEBUG oslo_concurrency.lockutils [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.752832] env[65503]: DEBUG oslo_concurrency.lockutils [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.753027] env[65503]: DEBUG oslo_concurrency.lockutils [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.753197] env[65503]: DEBUG oslo_concurrency.lockutils [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.755768] env[65503]: INFO nova.compute.manager [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Terminating instance [ 1017.015751] env[65503]: DEBUG oslo_concurrency.lockutils [None req-18971c53-34a8-44a9-966c-c3d755b45b44 
tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "refresh_cache-e37758cc-7287-4271-ad47-d711201d0add" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.016509] env[65503]: DEBUG nova.objects.instance [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lazy-loading 'flavor' on Instance uuid e37758cc-7287-4271-ad47-d711201d0add {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.083270] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5290da01-8a3c-72b2-13c8-714fda138836, 'name': SearchDatastore_Task, 'duration_secs': 0.021873} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.083659] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.089027] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1017.089027] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.089027] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.089027] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1017.089027] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-567562b4-9549-4df0-a65f-0987394752da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.090278] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 
req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1017.090850] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1017.100969] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1017.101945] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1017.103103] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf4de2bd-7e76-40a0-b3ab-cc07dff8228c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.113253] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1017.113253] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d01cd8-5162-3560-0ff4-b3509d0e16da" [ 1017.113253] env[65503]: _type = "Task" [ 1017.113253] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.127963] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d01cd8-5162-3560-0ff4-b3509d0e16da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.131757] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450509, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.138826] env[65503]: WARNING openstack [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1017.139285] env[65503]: WARNING openstack [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1017.156563] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.157618] env[65503]: DEBUG nova.compute.manager [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1017.160987] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.211s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.259786] env[65503]: DEBUG nova.compute.manager [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1017.260120] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1017.261102] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac08f8c0-4425-499a-b8b8-01f6ff026bbf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.275908] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1017.275908] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1fc962e3-ff66-484c-87ed-3f94039a3366 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.287562] env[65503]: DEBUG oslo_vmware.api [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 1017.287562] env[65503]: value = "task-4450513" [ 1017.287562] env[65503]: _type = "Task" [ 1017.287562] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.298982] env[65503]: DEBUG oslo_vmware.api [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450513, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.423999] env[65503]: WARNING neutronclient.v2_0.client [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1017.424747] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1017.425210] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1017.522792] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18db3cba-41d0-45eb-823c-bd6a9d1f56a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.527865] env[65503]: WARNING neutronclient.v2_0.client [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1017.528803] env[65503]: WARNING openstack [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1017.529474] env[65503]: WARNING openstack [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1017.558668] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1017.558793] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b63974a-e72b-4ecf-a062-d25276772781 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.567201] env[65503]: DEBUG oslo_vmware.api [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1017.567201] env[65503]: value = "task-4450514" [ 1017.567201] env[65503]: _type = "Task" [ 1017.567201] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.576813] env[65503]: DEBUG oslo_vmware.api [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450514, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.635600] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450509, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.636121] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d01cd8-5162-3560-0ff4-b3509d0e16da, 'name': SearchDatastore_Task, 'duration_secs': 0.021405} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.637197] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-314dd532-5186-463c-b253-fc990a3f8300 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.647396] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1017.647396] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c3cee6-3e8b-2b44-3875-e7d22c8df46b" [ 1017.647396] env[65503]: _type = "Task" [ 1017.647396] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.664925] env[65503]: DEBUG nova.compute.utils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1017.666789] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c3cee6-3e8b-2b44-3875-e7d22c8df46b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.670077] env[65503]: DEBUG nova.compute.manager [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1017.670429] env[65503]: DEBUG nova.network.neutron [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1017.670964] env[65503]: WARNING neutronclient.v2_0.client [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1017.671564] env[65503]: WARNING neutronclient.v2_0.client [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1017.672408] env[65503]: WARNING openstack [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1017.675017] env[65503]: WARNING openstack [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1017.792535] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "bee97942-afb2-465f-9774-56f5aa8becca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.792535] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "bee97942-afb2-465f-9774-56f5aa8becca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.808329] env[65503]: DEBUG oslo_vmware.api [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450513, 'name': PowerOffVM_Task, 'duration_secs': 0.338055} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.808648] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1017.809061] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1017.809342] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ca560d2-e848-4475-9547-b5da1f27da53 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.817805] env[65503]: DEBUG nova.policy [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e446ab541084695871cc3feac9835fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19e0e62fe31a46fc802dbfc625ac7645', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1017.825588] env[65503]: DEBUG nova.network.neutron [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updated VIF entry in instance network info cache for port 96370c0c-da2e-4229-82a1-a24b799e6402. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1017.826055] env[65503]: DEBUG nova.network.neutron [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updating instance_info_cache with network_info: [{"id": "96370c0c-da2e-4229-82a1-a24b799e6402", "address": "fa:16:3e:9a:2f:df", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96370c0c-da", "ovs_interfaceid": "96370c0c-da2e-4229-82a1-a24b799e6402", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1017.867737] env[65503]: DEBUG nova.network.neutron [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Updated VIF entry in instance network info cache for port 4089f68d-952e-4b5f-8578-0ce2a0afb9f1. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1017.868144] env[65503]: DEBUG nova.network.neutron [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Updating instance_info_cache with network_info: [{"id": "4089f68d-952e-4b5f-8578-0ce2a0afb9f1", "address": "fa:16:3e:d8:f1:dd", "network": {"id": "e65ab27f-25ec-415d-b0f1-4cee1c4a6a2e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1962350376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dbf0e9b08b741e88f971ec5f54dede8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4089f68d-95", "ovs_interfaceid": "4089f68d-952e-4b5f-8578-0ce2a0afb9f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1017.905960] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1017.906204] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1017.906410] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Deleting the datastore file [datastore2] ff256d3f-af88-4f01-bdfd-cf89e06ab364 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.906695] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26f4555b-25de-4c77-bd37-d2b49961c303 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.917146] env[65503]: DEBUG oslo_vmware.api [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for the task: (returnval){ [ 1017.917146] env[65503]: value = "task-4450516" [ 1017.917146] env[65503]: _type = "Task" [ 1017.917146] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.930057] env[65503]: DEBUG oslo_vmware.api [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450516, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.979578] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ace2fb-6f7b-4955-b70d-21c111adf432 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.989274] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528b2b67-016f-47d1-abf3-89b81ee1763f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.028796] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bec8ddc-7b05-49ac-a594-7cf151c2b4c2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.037926] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebafe86e-8009-4854-93ab-ae4d13713709 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.057144] env[65503]: DEBUG nova.compute.provider_tree [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.078606] env[65503]: DEBUG oslo_vmware.api [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450514, 'name': PowerOffVM_Task, 'duration_secs': 0.257502} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.078912] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1018.086593] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Reconfiguring VM instance instance-00000054 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1018.087468] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4abad681-abc4-44c2-a797-76e6f7da1cbf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.126516] env[65503]: DEBUG oslo_vmware.api [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1018.126516] env[65503]: value = "task-4450517" [ 1018.126516] env[65503]: _type = "Task" [ 1018.126516] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.136119] env[65503]: DEBUG oslo_vmware.api [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450509, 'name': ReconfigVM_Task, 'duration_secs': 2.734074} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.136899] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 2effe3e4-ea22-4d9f-8f5c-38ee661611e3/2effe3e4-ea22-4d9f-8f5c-38ee661611e3.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.137218] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance '2effe3e4-ea22-4d9f-8f5c-38ee661611e3' progress to 50 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1018.146991] env[65503]: DEBUG oslo_vmware.api [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450517, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.157832] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c3cee6-3e8b-2b44-3875-e7d22c8df46b, 'name': SearchDatastore_Task, 'duration_secs': 0.014431} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.158133] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.158397] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] b00a98a4-4865-4a02-a353-3d1da9ef0e51/b00a98a4-4865-4a02-a353-3d1da9ef0e51.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1018.158670] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce738fc0-a4de-4a3e-9fe8-87ae00788253 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.166447] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1018.166447] env[65503]: value = "task-4450518" [ 1018.166447] env[65503]: _type = "Task" [ 1018.166447] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.175860] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.182322] env[65503]: DEBUG nova.compute.manager [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1018.259674] env[65503]: DEBUG nova.network.neutron [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Successfully created port: 308ddfd2-c7be-4ccb-afb0-0baeec362526 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1018.302492] env[65503]: DEBUG nova.compute.manager [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1018.329778] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Releasing lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.330138] env[65503]: DEBUG nova.compute.manager [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Received event network-changed-c1fca361-555c-407f-bd51-6ea779e02f3a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1018.331038] env[65503]: DEBUG nova.compute.manager [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Refreshing instance network info cache due to event network-changed-c1fca361-555c-407f-bd51-6ea779e02f3a. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1018.331331] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Acquiring lock "refresh_cache-fcdcabb9-f076-4fa9-ac30-3220eb6064da" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.331491] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Acquired lock "refresh_cache-fcdcabb9-f076-4fa9-ac30-3220eb6064da" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.331654] env[65503]: DEBUG nova.network.neutron [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Refreshing network info cache for port c1fca361-555c-407f-bd51-6ea779e02f3a {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1018.380316] env[65503]: DEBUG oslo_concurrency.lockutils [req-d253610c-3b12-4add-9b27-1f4820006280 req-6e4a2aaa-b2f5-4848-aef3-670c80b3a645 service nova] Releasing lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.430213] env[65503]: DEBUG oslo_vmware.api [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Task: {'id': task-4450516, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.38172} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.430516] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1018.430670] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1018.431253] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1018.431253] env[65503]: INFO nova.compute.manager [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1018.432205] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1018.432205] env[65503]: DEBUG nova.compute.manager [-] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1018.432205] env[65503]: DEBUG nova.network.neutron [-] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1018.432205] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1018.433018] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1018.434100] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1018.560385] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.
[ 1018.564301] env[65503]: DEBUG nova.scheduler.client.report [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1018.639786] env[65503]: DEBUG oslo_vmware.api [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450517, 'name': ReconfigVM_Task, 'duration_secs': 0.305759} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.640165] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Reconfigured VM instance instance-00000054 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1018.640415] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1018.640688] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73f252dd-3a85-4206-8e6c-640b5d0a2d9c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.651898] env[65503]: DEBUG oslo_vmware.api [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1018.651898] env[65503]: value = "task-4450519" [ 1018.651898] env[65503]: _type = "Task" [ 1018.651898] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.652845] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9678d43-676a-4710-9632-53ac8a61a46c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.711851] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623bfef6-3cff-475f-be94-fbdb6cd5b7e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.716521] env[65503]: DEBUG oslo_vmware.api [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450519, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.725519] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450518, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.747535] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance '2effe3e4-ea22-4d9f-8f5c-38ee661611e3' progress to 67 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1018.829553] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.837413] env[65503]: WARNING neutronclient.v2_0.client [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1018.838140] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1018.838486] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1018.986860] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.987133] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.168384] env[65503]: DEBUG oslo_vmware.api [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803
tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450519, 'name': PowerOnVM_Task, 'duration_secs': 0.465705} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.173348] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1019.173624] env[65503]: DEBUG nova.compute.manager [None req-18971c53-34a8-44a9-966c-c3d755b45b44 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1019.174886] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3e43bd-4fa2-49f2-b70d-40bda1835bef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.203367] env[65503]: DEBUG nova.compute.manager [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1019.209865] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.660908} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.210142] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] b00a98a4-4865-4a02-a353-3d1da9ef0e51/b00a98a4-4865-4a02-a353-3d1da9ef0e51.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1019.210314] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1019.211328] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b8af9ea-95e6-4c01-b28f-9d421fc65d38 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.219602] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1019.219602] env[65503]: value = "task-4450520" [ 1019.219602] env[65503]: _type = "Task" [ 1019.219602] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.232260] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450520, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.243631] env[65503]: DEBUG nova.virt.hardware [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=<?>,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-11-14T15:45:13Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1019.243870] env[65503]: DEBUG nova.virt.hardware [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1019.243870] env[65503]: DEBUG nova.virt.hardware [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1019.244140] env[65503]: DEBUG nova.virt.hardware [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1019.244228] env[65503]: DEBUG nova.virt.hardware [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1019.244408] env[65503]: DEBUG nova.virt.hardware [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1019.244633] env[65503]: DEBUG nova.virt.hardware [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1019.244811] env[65503]: DEBUG nova.virt.hardware [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1019.244991] env[65503]: DEBUG nova.virt.hardware [None
req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1019.245174] env[65503]: DEBUG nova.virt.hardware [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1019.245343] env[65503]: DEBUG nova.virt.hardware [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1019.246970] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7dbb02-5d33-4a59-a397-e27f51cf7cbd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.256701] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5e469d-511d-4414-81cd-d9dfc80d306a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.262535] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1019.263209] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1019.276880] env[65503]: WARNING neutronclient.v2_0.client [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1019.370762] env[65503]: DEBUG nova.network.neutron [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Port e2945b8a-8327-4ac8-8d42-fc828663c0e0 binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 1019.388067] env[65503]: DEBUG nova.network.neutron [-] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1019.425064] env[65503]: WARNING neutronclient.v2_0.client [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1019.425829] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1019.426206] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1019.489986] env[65503]: DEBUG nova.compute.manager [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1019.502773] env[65503]: DEBUG nova.compute.manager [req-44abd217-e289-48dc-a751-2b564761f83b req-8de9c6d2-9f78-4047-a827-b49a6eb26b9f service nova] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Received event network-vif-deleted-73e98445-c951-4dc2-82e3-537e2196f82a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1019.577443] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.415s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.579556] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.750s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.583374] env[65503]: INFO nova.compute.claims [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1019.601683] env[65503]: DEBUG nova.network.neutron [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Updated VIF entry in instance network info cache for port c1fca361-555c-407f-bd51-6ea779e02f3a. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1019.602092] env[65503]: DEBUG nova.network.neutron [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Updating instance_info_cache with network_info: [{"id": "c1fca361-555c-407f-bd51-6ea779e02f3a", "address": "fa:16:3e:ce:a1:55", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1fca361-55", "ovs_interfaceid": "c1fca361-555c-407f-bd51-6ea779e02f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1019.731365] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450520, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118724} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.731638] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.732523] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e286f08b-b090-43ce-9c76-18d346dc997e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.761272] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] b00a98a4-4865-4a02-a353-3d1da9ef0e51/b00a98a4-4865-4a02-a353-3d1da9ef0e51.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.761816] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bbc4122-5d0a-4d96-a382-b511bf40c385 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.787733] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1019.787733] env[65503]: value = "task-4450521" [ 1019.787733] env[65503]: _type = "Task" [ 1019.787733] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.797801] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450521, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.859663] env[65503]: DEBUG nova.compute.manager [req-26495c05-4af7-4321-ad47-f1f521dc9ef7 req-48a09744-f260-4d6d-9b20-d428e2efbe5c service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Received event network-vif-plugged-308ddfd2-c7be-4ccb-afb0-0baeec362526 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1019.859663] env[65503]: DEBUG oslo_concurrency.lockutils [req-26495c05-4af7-4321-ad47-f1f521dc9ef7 req-48a09744-f260-4d6d-9b20-d428e2efbe5c service nova] Acquiring lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.859663] env[65503]: DEBUG oslo_concurrency.lockutils [req-26495c05-4af7-4321-ad47-f1f521dc9ef7 req-48a09744-f260-4d6d-9b20-d428e2efbe5c service nova] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.859663] env[65503]: DEBUG oslo_concurrency.lockutils [req-26495c05-4af7-4321-ad47-f1f521dc9ef7 req-48a09744-f260-4d6d-9b20-d428e2efbe5c service nova] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.859663] env[65503]: DEBUG nova.compute.manager [req-26495c05-4af7-4321-ad47-f1f521dc9ef7 req-48a09744-f260-4d6d-9b20-d428e2efbe5c service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] No waiting events found dispatching network-vif-plugged-308ddfd2-c7be-4ccb-afb0-0baeec362526 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1019.877258] env[65503]: WARNING nova.compute.manager [req-26495c05-4af7-4321-ad47-f1f521dc9ef7 req-48a09744-f260-4d6d-9b20-d428e2efbe5c service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Received unexpected event network-vif-plugged-308ddfd2-c7be-4ccb-afb0-0baeec362526 for instance with vm_state building and task_state spawning. [ 1019.892150] env[65503]: INFO nova.compute.manager [-] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Took 1.46 seconds to deallocate network for instance. [ 1019.927476] env[65503]: DEBUG nova.network.neutron [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Successfully updated port: 308ddfd2-c7be-4ccb-afb0-0baeec362526 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1019.977527] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a05b22-a5b7-d744-41d5-3483cdbfaa49/disk-0.vmdk.
{{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1019.977527] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bdac88-1dc2-4895-98e1-7125128c40da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.982728] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a05b22-a5b7-d744-41d5-3483cdbfaa49/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1019.982728] env[65503]: ERROR oslo_vmware.rw_handles [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a05b22-a5b7-d744-41d5-3483cdbfaa49/disk-0.vmdk due to incomplete transfer. [ 1019.982931] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a20686b2-1c38-4aaf-ae95-e053a4f604f7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.993743] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a05b22-a5b7-d744-41d5-3483cdbfaa49/disk-0.vmdk. {{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1019.993993] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Uploaded image ba6b58dd-a86f-4241-b14b-e9590b647969 to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1019.998546] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1020.001873] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-84cfd6fd-59b1-43bc-9dc8-08d2eb909105 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.013659] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1020.013659] env[65503]: value = "task-4450522" [ 1020.013659] env[65503]: _type = "Task" [ 1020.013659] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.022479] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.034694] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.035334] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.036702] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450522, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.104960] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Releasing lock "refresh_cache-fcdcabb9-f076-4fa9-ac30-3220eb6064da" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.105311] env[65503]: DEBUG nova.compute.manager [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Received event network-changed-4089f68d-952e-4b5f-8578-0ce2a0afb9f1 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1020.105512] env[65503]: DEBUG nova.compute.manager [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Refreshing instance network info cache due to event network-changed-4089f68d-952e-4b5f-8578-0ce2a0afb9f1.
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1020.105828] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Acquiring lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.106054] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Acquired lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.106311] env[65503]: DEBUG nova.network.neutron [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Refreshing network info cache for port 4089f68d-952e-4b5f-8578-0ce2a0afb9f1 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1020.152979] env[65503]: INFO nova.scheduler.client.report [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted allocation for migration dd9bd3db-cf9a-45b6-85df-7df46e78559e [ 1020.303843] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450521, 'name': ReconfigVM_Task, 'duration_secs': 0.291712} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.304369] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Reconfigured VM instance instance-0000005a to attach disk [datastore2] b00a98a4-4865-4a02-a353-3d1da9ef0e51/b00a98a4-4865-4a02-a353-3d1da9ef0e51.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.305265] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2699f4a5-2ebe-4f43-bc1e-6fec702704fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.314564] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1020.314564] env[65503]: value = "task-4450523" [ 1020.314564] env[65503]: _type = "Task" [ 1020.314564] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.327016] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450523, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.399493] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.399493] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.399493] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.400433] env[65503]: DEBUG oslo_concurrency.lockutils [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.429322] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "refresh_cache-f89ca00e-d54e-4040-bf18-9a5ec96378d5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.429322] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquired lock "refresh_cache-f89ca00e-d54e-4040-bf18-9a5ec96378d5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.429500] env[65503]: DEBUG nova.network.neutron [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1020.525096] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450522, 'name': Destroy_Task} progress is 33%.
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.538810] env[65503]: DEBUG nova.compute.manager [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1020.609551] env[65503]: WARNING neutronclient.v2_0.client [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1020.610314] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1020.610890] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1020.660780] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f8da4b4a-0563-4c94-a1e2-27b3600bc20c tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "82415443-1884-4898-996e-828d23f67f23" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 8.986s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.748741] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1020.748741] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1020.820696] env[65503]: WARNING neutronclient.v2_0.client [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.
[ 1020.821405] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1020.821824] env[65503]: WARNING openstack [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1020.840357] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450523, 'name': Rename_Task, 'duration_secs': 0.271502} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.840357] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1020.840357] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e2e1410-b39f-4e53-95dc-7e4d97205247 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.846497] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e61be4-5047-4b83-88dd-1b1b66bfedb6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.850403] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1020.850403] env[65503]: value = "task-4450524" [ 1020.850403] env[65503]: _type = "Task" [ 1020.850403] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.857592] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7989e01b-0ced-4eaa-877f-2e05823db2b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.864373] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450524, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.898102] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ec7da6-89ac-4f8f-9234-017b36310084 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.909159] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381bda6a-c2e3-4dc6-849b-1c6ed375a4a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.926325] env[65503]: DEBUG nova.compute.provider_tree [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.931689] env[65503]: WARNING openstack [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1020.932187] env[65503]: WARNING openstack [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1020.944503] env[65503]: DEBUG nova.network.neutron [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Updated VIF entry in instance network info cache for port 4089f68d-952e-4b5f-8578-0ce2a0afb9f1. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1020.945257] env[65503]: DEBUG nova.network.neutron [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Updating instance_info_cache with network_info: [{"id": "4089f68d-952e-4b5f-8578-0ce2a0afb9f1", "address": "fa:16:3e:d8:f1:dd", "network": {"id": "e65ab27f-25ec-415d-b0f1-4cee1c4a6a2e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1962350376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dbf0e9b08b741e88f971ec5f54dede8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4089f68d-95", "ovs_interfaceid": "4089f68d-952e-4b5f-8578-0ce2a0afb9f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1020.990869] env[65503]: DEBUG nova.network.neutron [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1021.011788] env[65503]: WARNING openstack [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1021.012230] env[65503]: WARNING openstack [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1021.029356] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450522, 'name': Destroy_Task, 'duration_secs': 0.673302} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.029615] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Destroyed the VM [ 1021.029843] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1021.030131] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-64569fbb-7286-4f02-87a4-28fb40f57f29 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.038201] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1021.038201] env[65503]: value = "task-4450525" [ 1021.038201] env[65503]: _type = "Task" [ 1021.038201] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.053610] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450525, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.072511] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.114835] env[65503]: WARNING neutronclient.v2_0.client [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
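Annotation: the "Invoking ... / Waiting for the task / progress is N% / completed successfully" sequences above (task-4450524 PowerOnVM_Task, task-4450525 RemoveSnapshot_Task) are oslo.vmware's call-then-poll pattern. A rough, generic sketch of that pattern against a placeholder vCenter; the host, credentials and choice of VM are assumptions, not values from this run:

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials -- not this deployment's vCenter.
    session = api.VMwareAPISession('vcenter.example.org',
                                   'administrator@vsphere.local', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # List VirtualMachine managed objects; the PropertyCollector.
    # RetrievePropertiesEx invocations in the log are this kind of read.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)
    vm_ref = result.objects[0].obj

    # Start a vCenter task and block on it; wait_for_task() polls the task
    # state, which is what produces the "progress is N% ... completed
    # successfully" entries seen above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)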
[ 1021.115540] env[65503]: WARNING openstack [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1021.115889] env[65503]: WARNING openstack [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1021.230439] env[65503]: DEBUG nova.network.neutron [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Updating instance_info_cache with network_info: [{"id": "308ddfd2-c7be-4ccb-afb0-0baeec362526", "address": "fa:16:3e:28:5f:e3", "network": {"id": "4abe123f-4f80-42e1-8dd0-fcf69e26c177", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-424144792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19e0e62fe31a46fc802dbfc625ac7645", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap308ddfd2-c7", "ovs_interfaceid": "308ddfd2-c7be-4ccb-afb0-0baeec362526", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1021.278021] env[65503]: DEBUG oslo_concurrency.lockutils [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.278021] env[65503]: DEBUG oslo_concurrency.lockutils [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.278021] env[65503]: DEBUG oslo_concurrency.lockutils [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 
tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.278021] env[65503]: DEBUG oslo_concurrency.lockutils [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.278021] env[65503]: DEBUG oslo_concurrency.lockutils [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.278944] env[65503]: INFO nova.compute.manager [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Terminating instance [ 1021.363046] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450524, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.405440] env[65503]: WARNING neutronclient.v2_0.client [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
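Annotation: the "Acquiring lock ... / acquired ... waited 0.000s / "released" ... held 0.000s" triples above are oslo.concurrency's lockutils debug logging. A minimal sketch of the two forms used throughout this log (decorator and context manager); the lock names are copied from the log purely for illustration:

    import time
    from oslo_concurrency import lockutils

    # Decorator form: serialize an operation per instance, as with the
    # "<uuid>-events" lock above.
    @lockutils.synchronized('310ee0b5-07ee-4cf0-b262-5e8b473efa3d-events')
    def clear_events():
        time.sleep(0.01)  # critical section

    # Context-manager form, as used for "compute_resources" and the
    # "refresh_cache-<uuid>" locks; the acquire/release DEBUG lines with
    # waited/held timings are emitted by lockutils itself.
    with lockutils.lock('compute_resources'):
        clear_events()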
[ 1021.429447] env[65503]: DEBUG nova.scheduler.client.report [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1021.448922] env[65503]: DEBUG oslo_concurrency.lockutils [req-4d13b8cc-85c7-4bb8-9b33-8634100c3bd7 req-80f2391d-dcb8-4744-9045-89d4625aba70 service nova] Releasing lock "refresh_cache-96d8f433-9b86-422f-88ef-99836fb21f30" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.502033] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.502033] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.502033] env[65503]: DEBUG nova.network.neutron [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1021.550612] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450525, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.735441] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Releasing lock "refresh_cache-f89ca00e-d54e-4040-bf18-9a5ec96378d5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.736016] env[65503]: DEBUG nova.compute.manager [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Instance network_info: |[{"id": "308ddfd2-c7be-4ccb-afb0-0baeec362526", "address": "fa:16:3e:28:5f:e3", "network": {"id": "4abe123f-4f80-42e1-8dd0-fcf69e26c177", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-424144792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19e0e62fe31a46fc802dbfc625ac7645", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap308ddfd2-c7", "ovs_interfaceid": "308ddfd2-c7be-4ccb-afb0-0baeec362526", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1021.736372] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:5f:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '308ddfd2-c7be-4ccb-afb0-0baeec362526', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1021.745030] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1021.745839] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1021.745839] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b5659a9-ddc7-4be6-9cd8-e1bfa9355fc3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.768956] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1021.768956] env[65503]: value = "task-4450526" [ 1021.768956] env[65503]: _type = "Task" [ 1021.768956] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.780023] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450526, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.786247] env[65503]: DEBUG nova.compute.manager [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1021.786247] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1021.786247] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715f8056-c299-41fc-9222-31eab077cd40 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.796034] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1021.796270] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24c9f82f-3a44-4627-892f-f322bf20e950 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.866289] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450524, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.911384] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1021.913227] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1021.913227] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleting the datastore file [datastore2] 310ee0b5-07ee-4cf0-b262-5e8b473efa3d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.913227] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4552d1d-4d4c-457e-98e2-ddacc81521b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.922176] env[65503]: DEBUG oslo_vmware.api [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1021.922176] env[65503]: value = "task-4450528" [ 1021.922176] env[65503]: _type = "Task" [ 1021.922176] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.932602] env[65503]: DEBUG oslo_vmware.api [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450528, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.934784] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.936174] env[65503]: DEBUG nova.compute.manager [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1021.938271] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.916s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.940096] env[65503]: INFO nova.compute.claims [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.004898] env[65503]: WARNING neutronclient.v2_0.client [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1022.006312] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1022.006765] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1022.051231] env[65503]: DEBUG oslo_vmware.api [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450525, 'name': RemoveSnapshot_Task, 'duration_secs': 0.710966} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.051667] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1022.052084] env[65503]: INFO nova.compute.manager [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Took 17.85 seconds to snapshot the instance on the hypervisor. 
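Annotation: the successful claim above is made against the inventory reported a few entries earlier for provider 988ff85a-1d12-41bb-a369-e298e8491ca1. A back-of-the-envelope sketch of how that inventory translates into schedulable capacity (capacity = total * allocation_ratio - reserved, with max_unit still capping any single allocation):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = inv['total'] * inv['allocation_ratio'] - inv['reserved']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0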
[ 1022.107477] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "f5319f15-16eb-468a-a70e-7226963ed219" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.107623] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "f5319f15-16eb-468a-a70e-7226963ed219" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.137982] env[65503]: DEBUG nova.compute.manager [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Received event network-changed-308ddfd2-c7be-4ccb-afb0-0baeec362526 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1022.138201] env[65503]: DEBUG nova.compute.manager [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Refreshing instance network info cache due to event network-changed-308ddfd2-c7be-4ccb-afb0-0baeec362526. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1022.138408] env[65503]: DEBUG oslo_concurrency.lockutils [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] Acquiring lock "refresh_cache-f89ca00e-d54e-4040-bf18-9a5ec96378d5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.138577] env[65503]: DEBUG oslo_concurrency.lockutils [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] Acquired lock "refresh_cache-f89ca00e-d54e-4040-bf18-9a5ec96378d5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.138762] env[65503]: DEBUG nova.network.neutron [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Refreshing network info cache for port 308ddfd2-c7be-4ccb-afb0-0baeec362526 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1022.196618] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1022.197129] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: 
oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1022.267027] env[65503]: WARNING neutronclient.v2_0.client [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1022.267789] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1022.268238] env[65503]: WARNING openstack [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1022.287576] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450526, 'name': CreateVM_Task, 'duration_secs': 0.402101} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.287821] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1022.288382] env[65503]: WARNING neutronclient.v2_0.client [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
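Annotation: the instance_info_cache updates above carry Nova's serialized network_info model: a list of VIF dicts, each with a port id, MAC ('address'), OVS binding details, and nested subnets with fixed IPs. A small illustrative helper (not Nova code) for pulling the fixed IPs out of such a structure:

    def fixed_ips(network_info):
        """Return (port_id, address) pairs for the fixed IPs in a VIF list."""
        ips = []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    if ip['type'] == 'fixed':
                        ips.append((vif['id'], ip['address']))
        return ips

    # Applied to the f89ca00e-... cache entry logged above, this yields
    # [('308ddfd2-c7be-4ccb-afb0-0baeec362526', '192.168.128.11')].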
[ 1022.288806] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.288972] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.289354] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1022.289701] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b1fa123-d149-4d64-8dc2-1eebc811f2ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.296617] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1022.296617] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52aa4007-7b6b-876d-f4d2-6604c523f173" [ 1022.296617] env[65503]: _type = "Task" [ 1022.296617] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.309444] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52aa4007-7b6b-876d-f4d2-6604c523f173, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.360036] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "82415443-1884-4898-996e-828d23f67f23" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.360259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "82415443-1884-4898-996e-828d23f67f23" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.360467] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "82415443-1884-4898-996e-828d23f67f23-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.360658] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "82415443-1884-4898-996e-828d23f67f23-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.360818] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "82415443-1884-4898-996e-828d23f67f23-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.363845] env[65503]: INFO nova.compute.manager [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Terminating instance [ 1022.370929] env[65503]: DEBUG oslo_vmware.api [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450524, 'name': PowerOnVM_Task, 'duration_secs': 1.203223} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.371430] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1022.371672] env[65503]: INFO nova.compute.manager [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Took 9.65 seconds to spawn the instance on the hypervisor. [ 1022.371877] env[65503]: DEBUG nova.compute.manager [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1022.372771] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66188c1-fd0a-44ef-8265-4250be3dbe05 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.409290] env[65503]: DEBUG nova.network.neutron [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance_info_cache with network_info: [{"id": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "address": "fa:16:3e:c9:1b:64", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2945b8a-83", "ovs_interfaceid": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1022.433620] env[65503]: DEBUG oslo_vmware.api [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450528, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271591} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.434096] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.434361] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1022.434570] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1022.434809] env[65503]: INFO nova.compute.manager [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1022.435145] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1022.435353] env[65503]: DEBUG nova.compute.manager [-] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1022.435507] env[65503]: DEBUG nova.network.neutron [-] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1022.435853] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
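Annotation: the "Waiting for function ... to return" lines (here for _deallocate_network_with_retries, earlier for vm_util.create_vm) are logged from oslo.service's loopingcall module. A generic sketch of that looping-call machinery, unrelated to the specific Nova functions named in the log:

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _poll():
        attempts['count'] += 1
        if attempts['count'] >= 3:
            # Raising LoopingCallDone stops the loop and hands the value
            # back to wait().
            raise loopingcall.LoopingCallDone(retvalue='done')

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    print(timer.start(interval=0.1).wait())  # 'done'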
[ 1022.436603] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1022.436909] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1022.447689] env[65503]: DEBUG nova.compute.utils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1022.449741] env[65503]: DEBUG nova.compute.manager [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1022.449965] env[65503]: DEBUG nova.network.neutron [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1022.450399] env[65503]: WARNING neutronclient.v2_0.client [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1022.450838] env[65503]: WARNING neutronclient.v2_0.client [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1022.452268] env[65503]: WARNING openstack [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1022.452268] env[65503]: WARNING openstack [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1022.528775] env[65503]: DEBUG nova.policy [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55db708d2a9b47baa25cafed2be1ba91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '463e93d05e1e4b27a3dc866a5b1991d0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1022.532074] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1022.557182] env[65503]: DEBUG nova.compute.manager [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Instance disappeared during snapshot {{(pid=65503) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4635}} [ 1022.574121] env[65503]: DEBUG nova.compute.manager [None req-aa56eb4f-e6c7-4802-b58a-9f21794514fc tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Image not found during clean up ba6b58dd-a86f-4241-b14b-e9590b647969 {{(pid=65503) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4641}} [ 1022.614027] env[65503]: DEBUG nova.compute.manager [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1022.642574] env[65503]: WARNING neutronclient.v2_0.client [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
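Annotation: the "Policy check for network:attach_external_network failed" entry above is a non-fatal authorization check of the request's credential dict (roles reader/member, no admin). A generic oslo.policy sketch of that kind of check; the 'role:admin' rule string is an assumption for illustration, not Nova's actual default:

    from oslo_config import cfg
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([])
    enforcer = policy.Enforcer(conf)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'],
             'project_id': '463e93d05e1e4b27a3dc866a5b1991d0'}
    print(enforcer.enforce('network:attach_external_network', {}, creds))
    # False: without the admin role the check fails, so the port is
    # requested without external-network privileges.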
[ 1022.644067] env[65503]: WARNING openstack [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1022.644552] env[65503]: WARNING openstack [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1022.813830] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52aa4007-7b6b-876d-f4d2-6604c523f173, 'name': SearchDatastore_Task, 'duration_secs': 0.019312} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.814469] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.814861] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1022.815322] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.815635] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.816384] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1022.816384] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-efd1d31d-d9a6-446f-b5c8-1aa543f3b72c {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.819762] env[65503]: DEBUG nova.network.neutron [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Successfully created port: 7309ef3a-1ef4-4d05-a35b-8aecb1167266 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1022.826326] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1022.827329] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1022.827390] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d073ab4d-f932-4b55-bc8b-5c881d0ff9fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.834131] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1022.834131] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cdfce7-9506-9f80-3dcb-7f4639daec89" [ 1022.834131] env[65503]: _type = "Task" [ 1022.834131] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.843809] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cdfce7-9506-9f80-3dcb-7f4639daec89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.876984] env[65503]: DEBUG nova.compute.manager [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1022.877237] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1022.878987] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2d5cb9-a5c8-4510-8038-9c6932bc096e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.893476] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.900398] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e70f1f31-3b54-4261-a9ab-b3ffd66139c7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.902669] env[65503]: INFO nova.compute.manager [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Took 19.83 seconds to build instance. [ 1022.912867] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.917289] env[65503]: DEBUG oslo_vmware.api [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1022.917289] env[65503]: value = "task-4450529" [ 1022.917289] env[65503]: _type = "Task" [ 1022.917289] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.931215] env[65503]: DEBUG oslo_vmware.api [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450529, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.962443] env[65503]: DEBUG nova.compute.manager [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1023.063205] env[65503]: WARNING openstack [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1023.063205] env[65503]: WARNING openstack [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1023.076551] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.076788] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.143202] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.152295] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "bcc91c22-5f92-4233-a293-54f2790a750b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.152360] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "bcc91c22-5f92-4233-a293-54f2790a750b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.272456] env[65503]: WARNING neutronclient.v2_0.client [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1023.273157] env[65503]: WARNING openstack [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1023.273542] env[65503]: WARNING openstack [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1023.286186] env[65503]: DEBUG nova.network.neutron [-] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1023.328699] env[65503]: DEBUG nova.compute.manager [req-fad1b4ad-8896-41ab-8ac5-8fd8069c8fb0 req-104cb743-c19f-45b9-a430-b424a781731b service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Received event network-vif-deleted-4d06e429-4f16-43d7-a443-99c8a67b3e4d {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1023.328955] env[65503]: INFO nova.compute.manager [req-fad1b4ad-8896-41ab-8ac5-8fd8069c8fb0 req-104cb743-c19f-45b9-a430-b424a781731b service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Neutron deleted interface 4d06e429-4f16-43d7-a443-99c8a67b3e4d; detaching it from the instance and deleting it from the info cache [ 1023.329208] env[65503]: DEBUG nova.network.neutron [req-fad1b4ad-8896-41ab-8ac5-8fd8069c8fb0 req-104cb743-c19f-45b9-a430-b424a781731b service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1023.343141] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa533db-8c04-4b8c-a96c-91323e4ad985 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.356868] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cdfce7-9506-9f80-3dcb-7f4639daec89, 'name': SearchDatastore_Task, 'duration_secs': 0.010734} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.361951] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2076fdd2-4a21-443b-baff-85196258246d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.365293] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9fa7d9-6c61-46b3-aada-8aa7d3e31201 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.373650] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1023.373650] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529a5bd1-0b8a-fc18-5d98-38c923d487aa" [ 1023.373650] env[65503]: _type = "Task" [ 1023.373650] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.408146] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e2eb6e-0d6d-43e6-84b6-1fd85d950fb3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.411012] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e57cb268-8d24-4d65-bd1a-967c503b0316 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.354s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.421267] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d7e000-08cf-436e-9a02-65f0603771d6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.428596] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529a5bd1-0b8a-fc18-5d98-38c923d487aa, 'name': SearchDatastore_Task, 'duration_secs': 0.01124} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.434733] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.434815] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f89ca00e-d54e-4040-bf18-9a5ec96378d5/f89ca00e-d54e-4040-bf18-9a5ec96378d5.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1023.436399] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ce7c4cf-08cf-4973-8165-67aff6fc38f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.447938] env[65503]: DEBUG nova.compute.provider_tree [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.454042] env[65503]: DEBUG oslo_vmware.api [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450529, 'name': PowerOffVM_Task, 'duration_secs': 0.352903} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.455547] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1023.455733] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1023.456583] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6aa28a3-4174-4e77-a446-c14208235d2a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.461933] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce13673c-5d65-4c49-8c11-7cbfe578e89c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.463744] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1023.463744] env[65503]: value = "task-4450530" [ 1023.463744] env[65503]: _type = "Task" [ 1023.463744] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.466090] env[65503]: DEBUG nova.network.neutron [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Updated VIF entry in instance network info cache for port 308ddfd2-c7be-4ccb-afb0-0baeec362526. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1023.466306] env[65503]: DEBUG nova.network.neutron [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Updating instance_info_cache with network_info: [{"id": "308ddfd2-c7be-4ccb-afb0-0baeec362526", "address": "fa:16:3e:28:5f:e3", "network": {"id": "4abe123f-4f80-42e1-8dd0-fcf69e26c177", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-424144792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19e0e62fe31a46fc802dbfc625ac7645", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap308ddfd2-c7", "ovs_interfaceid": "308ddfd2-c7be-4ccb-afb0-0baeec362526", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1023.492498] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed3f2e3-6f00-4a6a-ac7d-d8035c9b1403 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.504783] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450530, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.508749] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance '2effe3e4-ea22-4d9f-8f5c-38ee661611e3' progress to 83 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1023.559073] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1023.559354] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1023.559485] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleting the datastore file [datastore2] 82415443-1884-4898-996e-828d23f67f23 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1023.559789] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c06c9bb-ae4c-4a7b-a462-0ceed3cee989 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.568150] env[65503]: DEBUG oslo_vmware.api [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1023.568150] env[65503]: value = "task-4450532" [ 1023.568150] env[65503]: _type = "Task" [ 1023.568150] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.578282] env[65503]: DEBUG oslo_vmware.api [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450532, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.580142] env[65503]: DEBUG nova.compute.manager [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1023.656373] env[65503]: DEBUG nova.compute.manager [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1023.795197] env[65503]: INFO nova.compute.manager [-] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Took 1.36 seconds to deallocate network for instance. [ 1023.834124] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-253d2e49-7070-493a-ae09-a888a847777b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.850778] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc13da5-bb48-4e70-ae45-13a8f59011ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.889731] env[65503]: DEBUG nova.compute.manager [req-fad1b4ad-8896-41ab-8ac5-8fd8069c8fb0 req-104cb743-c19f-45b9-a430-b424a781731b service nova] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Detach interface failed, port_id=4d06e429-4f16-43d7-a443-99c8a67b3e4d, reason: Instance 310ee0b5-07ee-4cf0-b262-5e8b473efa3d could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1023.951700] env[65503]: DEBUG nova.scheduler.client.report [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1023.968675] env[65503]: DEBUG oslo_concurrency.lockutils [req-a8c2756a-b846-48f9-a951-87b0bfbe1fb6 req-8d187835-f5a8-437c-940f-f0b366e5f945 service nova] Releasing lock "refresh_cache-f89ca00e-d54e-4040-bf18-9a5ec96378d5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.972689] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.972934] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.975008] env[65503]: DEBUG nova.compute.manager [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Start spawning the 
instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1023.996144] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450530, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473769} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.996144] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f89ca00e-d54e-4040-bf18-9a5ec96378d5/f89ca00e-d54e-4040-bf18-9a5ec96378d5.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.996673] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.997369] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f5d02c8-4b83-4a86-ab4d-d351c93908f8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.009313] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1024.009313] env[65503]: value = "task-4450533" [ 1024.009313] env[65503]: _type = "Task" [ 1024.009313] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.019639] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb276468-51ce-4e11-841c-3d3b40a88473 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance '2effe3e4-ea22-4d9f-8f5c-38ee661611e3' progress to 100 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1024.030321] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450533, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.032738] env[65503]: DEBUG nova.virt.hardware [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1024.033195] env[65503]: DEBUG nova.virt.hardware [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1024.033195] env[65503]: DEBUG nova.virt.hardware [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1024.033333] env[65503]: DEBUG nova.virt.hardware [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1024.033420] env[65503]: DEBUG nova.virt.hardware [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1024.033557] env[65503]: DEBUG nova.virt.hardware [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1024.033788] env[65503]: DEBUG nova.virt.hardware [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1024.033988] env[65503]: DEBUG nova.virt.hardware [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1024.034241] env[65503]: DEBUG nova.virt.hardware [None 
req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1024.034861] env[65503]: DEBUG nova.virt.hardware [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1024.034861] env[65503]: DEBUG nova.virt.hardware [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1024.035802] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b274681e-f7d5-4ed6-9c00-aed8726b0af1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.050597] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c89edb-9e8b-4a1d-aebe-71bea0feaa25 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.082482] env[65503]: DEBUG oslo_vmware.api [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450532, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.43098} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.082482] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1024.082768] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1024.082768] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1024.083246] env[65503]: INFO nova.compute.manager [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 82415443-1884-4898-996e-828d23f67f23] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 1024.083246] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1024.083641] env[65503]: DEBUG nova.compute.manager [-] [instance: 82415443-1884-4898-996e-828d23f67f23] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1024.083641] env[65503]: DEBUG nova.network.neutron [-] [instance: 82415443-1884-4898-996e-828d23f67f23] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1024.083779] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1024.084496] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1024.084696] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1024.111750] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.177552] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.305227] env[65503]: DEBUG oslo_concurrency.lockutils [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.398399] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1024.456931] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.457925] env[65503]: DEBUG nova.compute.manager [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1024.461359] env[65503]: DEBUG oslo_concurrency.lockutils [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.060s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.461897] env[65503]: DEBUG nova.objects.instance [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lazy-loading 'resources' on Instance uuid ff256d3f-af88-4f01-bdfd-cf89e06ab364 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.466014] env[65503]: DEBUG nova.compute.manager [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Received event network-changed-96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1024.466239] env[65503]: DEBUG nova.compute.manager [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Refreshing instance network info cache due to event network-changed-96370c0c-da2e-4229-82a1-a24b799e6402. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1024.466416] env[65503]: DEBUG oslo_concurrency.lockutils [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] Acquiring lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.466593] env[65503]: DEBUG oslo_concurrency.lockutils [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] Acquired lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.466732] env[65503]: DEBUG nova.network.neutron [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Refreshing network info cache for port 96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1024.483575] env[65503]: DEBUG nova.compute.manager [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1024.525316] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450533, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081402} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.531642] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1024.531642] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a985a31-bf1c-47bd-b71f-937c2c965e54 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.535374] env[65503]: DEBUG nova.network.neutron [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Successfully updated port: 7309ef3a-1ef4-4d05-a35b-8aecb1167266 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1024.562065] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] f89ca00e-d54e-4040-bf18-9a5ec96378d5/f89ca00e-d54e-4040-bf18-9a5ec96378d5.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1024.562917] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-214d1e2e-b75a-4c75-b80e-72745f7af155 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.586961] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1024.586961] env[65503]: value = "task-4450534" [ 1024.586961] env[65503]: _type = "Task" [ 1024.586961] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.597115] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450534, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.967129] env[65503]: DEBUG nova.compute.utils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1024.968888] env[65503]: DEBUG nova.compute.manager [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1024.968888] env[65503]: DEBUG nova.network.neutron [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1024.969237] env[65503]: WARNING neutronclient.v2_0.client [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1024.969564] env[65503]: WARNING neutronclient.v2_0.client [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1024.970163] env[65503]: WARNING openstack [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1024.970506] env[65503]: WARNING openstack [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1024.977924] env[65503]: WARNING neutronclient.v2_0.client [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1024.978529] env[65503]: WARNING openstack [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1024.978879] env[65503]: WARNING openstack [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1025.011827] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.025846] env[65503]: DEBUG nova.policy [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9955668c2464ddfb0eae34aa700ddd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '521d40776571452e85178972f97c8622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1025.038119] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.038305] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.038469] env[65503]: DEBUG nova.network.neutron [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1025.105870] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450534, 'name': ReconfigVM_Task, 'duration_secs': 0.2794} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.108613] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Reconfigured VM instance instance-0000005b to attach disk [datastore2] f89ca00e-d54e-4040-bf18-9a5ec96378d5/f89ca00e-d54e-4040-bf18-9a5ec96378d5.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1025.109873] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75ba6d0f-6d03-4a99-8bfb-2a964e42cf00 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.117736] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1025.117736] env[65503]: value = "task-4450535" [ 1025.117736] env[65503]: _type = "Task" [ 1025.117736] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.132024] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450535, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.150636] env[65503]: WARNING openstack [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1025.151145] env[65503]: WARNING openstack [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1025.190489] env[65503]: DEBUG nova.network.neutron [-] [instance: 82415443-1884-4898-996e-828d23f67f23] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1025.224935] env[65503]: WARNING neutronclient.v2_0.client [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1025.225820] env[65503]: WARNING openstack [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1025.226450] env[65503]: WARNING openstack [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1025.322952] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ad37c7-a8b1-4959-8d77-0423f21d3908 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.330848] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6af2c6-7853-458f-8ddf-1cd935b1aaff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.368411] env[65503]: DEBUG nova.network.neutron [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updated VIF entry in instance network info cache for port 96370c0c-da2e-4229-82a1-a24b799e6402. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1025.368785] env[65503]: DEBUG nova.network.neutron [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updating instance_info_cache with network_info: [{"id": "96370c0c-da2e-4229-82a1-a24b799e6402", "address": "fa:16:3e:9a:2f:df", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96370c0c-da", "ovs_interfaceid": "96370c0c-da2e-4229-82a1-a24b799e6402", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1025.372983] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd20254a-c3f5-4f42-b02c-319587f883ae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.375316] env[65503]: DEBUG nova.compute.manager [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Received event network-vif-plugged-7309ef3a-1ef4-4d05-a35b-8aecb1167266 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1025.375532] env[65503]: DEBUG oslo_concurrency.lockutils [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Acquiring lock "bee97942-afb2-465f-9774-56f5aa8becca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.375731] env[65503]: DEBUG oslo_concurrency.lockutils [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Lock "bee97942-afb2-465f-9774-56f5aa8becca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.375888] env[65503]: DEBUG oslo_concurrency.lockutils [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Lock "bee97942-afb2-465f-9774-56f5aa8becca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1025.376059] env[65503]: DEBUG nova.compute.manager [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] [instance: bee97942-afb2-465f-9774-56f5aa8becca] No waiting events found dispatching network-vif-plugged-7309ef3a-1ef4-4d05-a35b-8aecb1167266 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1025.376243] env[65503]: WARNING nova.compute.manager [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Received unexpected event network-vif-plugged-7309ef3a-1ef4-4d05-a35b-8aecb1167266 for instance with vm_state building and task_state spawning. [ 1025.376473] env[65503]: DEBUG nova.compute.manager [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Received event network-changed-7309ef3a-1ef4-4d05-a35b-8aecb1167266 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1025.376565] env[65503]: DEBUG nova.compute.manager [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Refreshing instance network info cache due to event network-changed-7309ef3a-1ef4-4d05-a35b-8aecb1167266. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1025.376814] env[65503]: DEBUG oslo_concurrency.lockutils [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Acquiring lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.380132] env[65503]: DEBUG nova.network.neutron [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Successfully created port: 40a9564e-b61f-47ad-9d1b-9494f3514527 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1025.387948] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69585bbf-8dd1-4711-a2ff-27cb101058e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.407791] env[65503]: DEBUG nova.compute.provider_tree [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.478731] env[65503]: DEBUG nova.compute.manager [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1025.553559] env[65503]: WARNING openstack [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1025.553559] env[65503]: WARNING openstack [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1025.602984] env[65503]: DEBUG nova.network.neutron [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1025.629708] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450535, 'name': Rename_Task, 'duration_secs': 0.163185} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.629992] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1025.630272] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55bcdf69-f6dd-47ed-a282-d3cc8d52c8be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.640867] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1025.640867] env[65503]: value = "task-4450536" [ 1025.640867] env[65503]: _type = "Task" [ 1025.640867] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.654132] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450536, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.681667] env[65503]: WARNING openstack [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1025.682113] env[65503]: WARNING openstack [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1025.692377] env[65503]: INFO nova.compute.manager [-] [instance: 82415443-1884-4898-996e-828d23f67f23] Took 1.61 seconds to deallocate network for instance. [ 1025.786056] env[65503]: WARNING neutronclient.v2_0.client [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1025.786056] env[65503]: WARNING openstack [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1025.786357] env[65503]: WARNING openstack [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1025.879696] env[65503]: DEBUG oslo_concurrency.lockutils [req-0a2524b6-c443-47a1-902f-486f1dd37b1c req-f9fd15d7-75aa-4ece-8a9e-26ae3e160be0 service nova] Releasing lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.912110] env[65503]: DEBUG nova.scheduler.client.report [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
1025.920382] env[65503]: DEBUG nova.network.neutron [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance_info_cache with network_info: [{"id": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "address": "fa:16:3e:b6:80:b2", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309ef3a-1e", "ovs_interfaceid": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1026.155106] env[65503]: DEBUG oslo_vmware.api [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450536, 'name': PowerOnVM_Task, 'duration_secs': 0.507897} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.155547] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1026.155770] env[65503]: INFO nova.compute.manager [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Took 6.95 seconds to spawn the instance on the hypervisor. 
[ 1026.155953] env[65503]: DEBUG nova.compute.manager [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1026.156926] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84521e56-561e-42e5-b197-e3d99f92e32c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.199479] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.420036] env[65503]: DEBUG oslo_concurrency.lockutils [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.958s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.423890] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.351s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.426054] env[65503]: INFO nova.compute.claims [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1026.429685] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.430199] env[65503]: DEBUG nova.compute.manager [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Instance network_info: |[{"id": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "address": "fa:16:3e:b6:80:b2", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309ef3a-1e", "ovs_interfaceid": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1026.430662] env[65503]: DEBUG oslo_concurrency.lockutils [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Acquired lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.431031] env[65503]: DEBUG nova.network.neutron [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Refreshing network info cache for port 7309ef3a-1ef4-4d05-a35b-8aecb1167266 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1026.433293] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:80:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7309ef3a-1ef4-4d05-a35b-8aecb1167266', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1026.442429] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1026.444078] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1026.444078] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca839687-d4c6-4560-9bf6-ec17d8d1f148 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.461131] env[65503]: INFO nova.scheduler.client.report [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Deleted allocations for instance ff256d3f-af88-4f01-bdfd-cf89e06ab364 [ 1026.474233] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1026.474233] env[65503]: value = "task-4450537" [ 1026.474233] env[65503]: _type = "Task" [ 1026.474233] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.485188] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450537, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.490144] env[65503]: DEBUG nova.compute.manager [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1026.501946] env[65503]: DEBUG nova.compute.manager [req-a6e560ae-cbc4-4afe-880a-dabf6e1bfa0b req-fa7925a5-a93e-400a-992e-bf691b69da97 service nova] [instance: 82415443-1884-4898-996e-828d23f67f23] Received event network-vif-deleted-d2e6ffe6-e1d5-4cbf-bd53-d754ec575c8f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1026.523787] env[65503]: DEBUG nova.virt.hardware [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1026.524067] env[65503]: DEBUG nova.virt.hardware [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1026.524307] env[65503]: DEBUG nova.virt.hardware [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1026.524550] env[65503]: DEBUG nova.virt.hardware [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1026.524727] env[65503]: DEBUG nova.virt.hardware [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1026.524906] env[65503]: DEBUG nova.virt.hardware [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1026.525172] 
env[65503]: DEBUG nova.virt.hardware [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1026.525337] env[65503]: DEBUG nova.virt.hardware [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1026.525495] env[65503]: DEBUG nova.virt.hardware [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1026.525649] env[65503]: DEBUG nova.virt.hardware [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1026.525813] env[65503]: DEBUG nova.virt.hardware [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1026.526991] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3186cb4-2e34-4512-82be-7f64fd8858e7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.536830] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babde8e4-ce83-4619-baaf-e4cad134f206 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.560615] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.560904] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.561107] env[65503]: DEBUG nova.compute.manager [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Going to confirm migration 5 {{(pid=65503) do_confirm_resize 
/opt/stack/nova/nova/compute/manager.py:5290}} [ 1026.679467] env[65503]: INFO nova.compute.manager [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Took 13.89 seconds to build instance. [ 1026.935096] env[65503]: WARNING neutronclient.v2_0.client [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1026.935764] env[65503]: WARNING openstack [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1026.936403] env[65503]: WARNING openstack [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1026.947534] env[65503]: DEBUG nova.network.neutron [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Successfully updated port: 40a9564e-b61f-47ad-9d1b-9494f3514527 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1026.968251] env[65503]: DEBUG oslo_concurrency.lockutils [None req-73664621-3a8c-4db3-9f08-cd7b28e42a78 tempest-ServersNegativeTestJSON-811523686 tempest-ServersNegativeTestJSON-811523686-project-member] Lock "ff256d3f-af88-4f01-bdfd-cf89e06ab364" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.215s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.987982] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450537, 'name': CreateVM_Task, 'duration_secs': 0.382018} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.989181] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1026.989360] env[65503]: WARNING neutronclient.v2_0.client [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1026.990259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.990259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.990523] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1026.991687] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce7d5bd5-7889-4c7b-b337-9802d91d32e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.998951] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1026.998951] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]522e5da1-088b-a0a1-349e-bd7b5f2ad3ff" [ 1026.998951] env[65503]: _type = "Task" [ 1026.998951] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.009614] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522e5da1-088b-a0a1-349e-bd7b5f2ad3ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.067665] env[65503]: WARNING neutronclient.v2_0.client [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1027.182270] env[65503]: DEBUG oslo_concurrency.lockutils [None req-94dda160-81f2-4b25-b4cc-587408e7c131 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.414s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.391766] env[65503]: WARNING neutronclient.v2_0.client [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1027.391766] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.391766] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.391766] env[65503]: DEBUG nova.network.neutron [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1027.391766] env[65503]: DEBUG nova.objects.instance [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'info_cache' on Instance uuid 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.458008] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.458299] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.458537] env[65503]: DEBUG nova.network.neutron [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1027.505897] env[65503]: WARNING openstack [req-59afecc9-a3a6-45d3-b82a-380bd036c2be 
req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1027.506322] env[65503]: WARNING openstack [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1027.528129] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522e5da1-088b-a0a1-349e-bd7b5f2ad3ff, 'name': SearchDatastore_Task, 'duration_secs': 0.012124} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.528455] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.528684] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.528913] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.529068] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.529346] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.529645] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b713f1c5-630c-4736-ba69-6a28d56dde3b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.543757] env[65503]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.543957] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1027.544785] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3876a97-e34e-4e4a-bc03-c22a9ed90515 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.553065] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1027.553065] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528e233c-0aad-58c2-8d89-8f739a4edde3" [ 1027.553065] env[65503]: _type = "Task" [ 1027.553065] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.577748] env[65503]: DEBUG nova.compute.manager [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Received event network-changed-308ddfd2-c7be-4ccb-afb0-0baeec362526 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1027.578011] env[65503]: DEBUG nova.compute.manager [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Refreshing instance network info cache due to event network-changed-308ddfd2-c7be-4ccb-afb0-0baeec362526. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1027.578345] env[65503]: DEBUG oslo_concurrency.lockutils [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] Acquiring lock "refresh_cache-f89ca00e-d54e-4040-bf18-9a5ec96378d5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.578514] env[65503]: DEBUG oslo_concurrency.lockutils [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] Acquired lock "refresh_cache-f89ca00e-d54e-4040-bf18-9a5ec96378d5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.579016] env[65503]: DEBUG nova.network.neutron [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Refreshing network info cache for port 308ddfd2-c7be-4ccb-afb0-0baeec362526 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1027.584409] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528e233c-0aad-58c2-8d89-8f739a4edde3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.624692] env[65503]: WARNING neutronclient.v2_0.client [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1027.625386] env[65503]: WARNING openstack [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1027.625754] env[65503]: WARNING openstack [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1027.745818] env[65503]: DEBUG nova.network.neutron [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updated VIF entry in instance network info cache for port 7309ef3a-1ef4-4d05-a35b-8aecb1167266. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1027.746323] env[65503]: DEBUG nova.network.neutron [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance_info_cache with network_info: [{"id": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "address": "fa:16:3e:b6:80:b2", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309ef3a-1e", "ovs_interfaceid": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1027.791077] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c562780-e35a-4a12-bf5f-4100009ae8d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.800645] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80aa8eb8-c3c9-4a49-a1a1-1165527ae0e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.847319] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60c283b-89b0-4e80-90e5-18e06158e13f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.857342] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95e35bb-cc9c-4f8d-9ac9-5d5ff2b3e1cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.876308] env[65503]: DEBUG nova.compute.provider_tree [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.961199] env[65503]: WARNING openstack [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group 
[cinder] [ 1027.961649] env[65503]: WARNING openstack [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1028.004427] env[65503]: DEBUG nova.network.neutron [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1028.028584] env[65503]: WARNING openstack [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1028.028974] env[65503]: WARNING openstack [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1028.075366] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528e233c-0aad-58c2-8d89-8f739a4edde3, 'name': SearchDatastore_Task, 'duration_secs': 0.029695} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.076298] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ae58d1b-8b19-494a-ac78-88814cc37d76 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.082061] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1028.082061] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526b6c13-ad32-29fa-7d53-7be9600dfb76" [ 1028.082061] env[65503]: _type = "Task" [ 1028.082061] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.085437] env[65503]: WARNING neutronclient.v2_0.client [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1028.086057] env[65503]: WARNING openstack [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1028.086402] env[65503]: WARNING openstack [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1028.101203] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526b6c13-ad32-29fa-7d53-7be9600dfb76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.102385] env[65503]: WARNING neutronclient.v2_0.client [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1028.102996] env[65503]: WARNING openstack [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1028.103346] env[65503]: WARNING openstack [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1028.249488] env[65503]: DEBUG oslo_concurrency.lockutils [req-59afecc9-a3a6-45d3-b82a-380bd036c2be req-8bd97114-cc7c-44ea-a745-ee90b0c79353 service nova] Releasing lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.265988] env[65503]: DEBUG nova.network.neutron [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Updating instance_info_cache with network_info: [{"id": "40a9564e-b61f-47ad-9d1b-9494f3514527", "address": "fa:16:3e:79:2e:8d", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40a9564e-b6", "ovs_interfaceid": "40a9564e-b61f-47ad-9d1b-9494f3514527", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1028.307689] env[65503]: WARNING openstack [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1028.308157] env[65503]: WARNING openstack [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1028.380698] env[65503]: DEBUG nova.scheduler.client.report [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1028.397493] env[65503]: WARNING neutronclient.v2_0.client [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1028.398294] env[65503]: WARNING openstack [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1028.398750] env[65503]: WARNING openstack [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1028.406971] env[65503]: WARNING neutronclient.v2_0.client [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1028.407595] env[65503]: WARNING openstack [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1028.407983] env[65503]: WARNING openstack [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1028.535725] env[65503]: DEBUG nova.compute.manager [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Received event network-vif-plugged-40a9564e-b61f-47ad-9d1b-9494f3514527 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1028.536096] env[65503]: DEBUG oslo_concurrency.lockutils [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Acquiring lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.536096] env[65503]: DEBUG oslo_concurrency.lockutils [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.536972] env[65503]: DEBUG oslo_concurrency.lockutils [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.537218] env[65503]: DEBUG nova.compute.manager [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] No waiting events found dispatching network-vif-plugged-40a9564e-b61f-47ad-9d1b-9494f3514527 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1028.537426] env[65503]: WARNING nova.compute.manager [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Received unexpected event network-vif-plugged-40a9564e-b61f-47ad-9d1b-9494f3514527 for instance with vm_state building and task_state spawning. [ 1028.537632] env[65503]: DEBUG nova.compute.manager [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Received event network-changed-40a9564e-b61f-47ad-9d1b-9494f3514527 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1028.537808] env[65503]: DEBUG nova.compute.manager [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Refreshing instance network info cache due to event network-changed-40a9564e-b61f-47ad-9d1b-9494f3514527. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1028.538013] env[65503]: DEBUG oslo_concurrency.lockutils [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Acquiring lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.564043] env[65503]: DEBUG nova.network.neutron [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Updated VIF entry in instance network info cache for port 308ddfd2-c7be-4ccb-afb0-0baeec362526. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1028.564456] env[65503]: DEBUG nova.network.neutron [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Updating instance_info_cache with network_info: [{"id": "308ddfd2-c7be-4ccb-afb0-0baeec362526", "address": "fa:16:3e:28:5f:e3", "network": {"id": "4abe123f-4f80-42e1-8dd0-fcf69e26c177", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-424144792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19e0e62fe31a46fc802dbfc625ac7645", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap308ddfd2-c7", "ovs_interfaceid": "308ddfd2-c7be-4ccb-afb0-0baeec362526", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1028.594167] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526b6c13-ad32-29fa-7d53-7be9600dfb76, 'name': SearchDatastore_Task, 'duration_secs': 0.029215} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.594489] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.594792] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] bee97942-afb2-465f-9774-56f5aa8becca/bee97942-afb2-465f-9774-56f5aa8becca.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1028.595099] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db1a5789-5986-432c-90ef-a35f8a94acf0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.608909] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1028.608909] env[65503]: value = "task-4450538" [ 1028.608909] env[65503]: _type = "Task" [ 1028.608909] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.618859] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450538, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.665644] env[65503]: WARNING openstack [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1028.666046] env[65503]: WARNING openstack [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1028.730346] env[65503]: WARNING neutronclient.v2_0.client [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
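[editor's note] The CopyVirtualDisk_Task sequence above (submit the task, then repeated "progress is N%" polls until "completed successfully") is the usual oslo.vmware pattern: a *_Task VIM call returns a task reference and the session polls it to completion. Below is a rough sketch of that pattern only; it assumes an already-created oslo.vmware VMwareAPISession named `session`, the helper name and argument set are illustrative rather than copied from Nova's vm_util, and the datastore paths are placeholders.

    def copy_virtual_disk(session, source_path, dest_path):
        """Submit CopyVirtualDisk_Task and block until vCenter finishes it."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=source_path,   # e.g. the cached image .vmdk
                                  destName=dest_path)       # e.g. the instance's .vmdk
        # wait_for_task() polls the task (the "progress is N%" DEBUG lines above)
        # and raises if vCenter reports an error state.
        session.wait_for_task(task)

    # Hypothetical usage, with paths shaped like the ones in the log:
    # copy_virtual_disk(session,
    #                   '[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
    #                   '[datastore2] <instance-uuid>/<instance-uuid>.vmdk')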
[ 1028.731078] env[65503]: WARNING openstack [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1028.731303] env[65503]: WARNING openstack [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1028.769134] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.769637] env[65503]: DEBUG nova.compute.manager [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Instance network_info: |[{"id": "40a9564e-b61f-47ad-9d1b-9494f3514527", "address": "fa:16:3e:79:2e:8d", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40a9564e-b6", "ovs_interfaceid": "40a9564e-b61f-47ad-9d1b-9494f3514527", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1028.770016] env[65503]: DEBUG oslo_concurrency.lockutils [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Acquired lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.770224] env[65503]: DEBUG nova.network.neutron [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Refreshing network info cache for port 40a9564e-b61f-47ad-9d1b-9494f3514527 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 
1028.771673] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:2e:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a7ba8d0-0208-4af7-af44-2a5ad382f9be', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40a9564e-b61f-47ad-9d1b-9494f3514527', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1028.780232] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Creating folder: Project (521d40776571452e85178972f97c8622). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1028.781695] env[65503]: WARNING neutronclient.v2_0.client [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1028.782441] env[65503]: WARNING openstack [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1028.782836] env[65503]: WARNING openstack [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1028.791269] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f3fe9c2-b1a1-495d-bea6-8431198541fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.804881] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Created folder: Project (521d40776571452e85178972f97c8622) in parent group-v870190. [ 1028.805120] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Creating folder: Instances. Parent ref: group-v870449. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1028.805356] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ddcebf12-c211-445b-a044-56ae92c83e41 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.817401] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Created folder: Instances in parent group-v870449. [ 1028.817666] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1028.817864] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1028.818104] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-609f9242-ad51-483c-8902-398d9d8c5b4f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.841098] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1028.841098] env[65503]: value = "task-4450541" [ 1028.841098] env[65503]: _type = "Task" [ 1028.841098] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.852514] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450541, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.886636] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.463s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.887291] env[65503]: DEBUG nova.compute.manager [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1028.891327] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.748s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.896599] env[65503]: INFO nova.compute.claims [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.919062] env[65503]: DEBUG nova.network.neutron [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance_info_cache with network_info: [{"id": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "address": "fa:16:3e:c9:1b:64", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2945b8a-83", "ovs_interfaceid": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1028.957698] env[65503]: WARNING openstack [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1028.958116] env[65503]: WARNING openstack [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1029.031696] env[65503]: WARNING neutronclient.v2_0.client [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be 
removed in a future release. [ 1029.032611] env[65503]: WARNING openstack [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1029.032994] env[65503]: WARNING openstack [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1029.067766] env[65503]: DEBUG oslo_concurrency.lockutils [req-677fdb67-e17f-427e-a559-f64cb9f28adf req-240e6ee1-5ddf-4fe4-afa6-7c02e8eb48b7 service nova] Releasing lock "refresh_cache-f89ca00e-d54e-4040-bf18-9a5ec96378d5" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.119888] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450538, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.124275] env[65503]: DEBUG nova.network.neutron [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Updated VIF entry in instance network info cache for port 40a9564e-b61f-47ad-9d1b-9494f3514527. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1029.124691] env[65503]: DEBUG nova.network.neutron [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Updating instance_info_cache with network_info: [{"id": "40a9564e-b61f-47ad-9d1b-9494f3514527", "address": "fa:16:3e:79:2e:8d", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40a9564e-b6", "ovs_interfaceid": "40a9564e-b61f-47ad-9d1b-9494f3514527", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1029.355738] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450541, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.405274] env[65503]: DEBUG nova.compute.utils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1029.408485] env[65503]: DEBUG nova.compute.manager [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1029.408675] env[65503]: DEBUG nova.network.neutron [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1029.408991] env[65503]: WARNING neutronclient.v2_0.client [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
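[editor's note] The instance_info_cache payloads logged above are one VIF entry per port, each carrying the port id, MAC, devname and a nested network/subnet tree. The sketch below just walks that structure as plain dicts to pull out the values visible in the log; Nova itself wraps this data in nova.network.model objects, and the function and variable names here are illustrative.

    def summarize_vifs(network_info):
        """network_info: the list logged by update_instance_cache_with_nw_info above."""
        for vif in network_info:
            fixed, floating = [], []
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    fixed.append(ip['address'])
                    floating.extend(f['address'] for f in ip['floating_ips'])
            yield {'port': vif['id'],                              # e.g. 40a9564e-b61f-...
                   'mac': vif['address'],                          # e.g. fa:16:3e:79:2e:8d
                   'devname': vif['devname'],                      # e.g. tap40a9564e-b6
                   'segmentation_id': vif['details'].get('segmentation_id'),
                   'fixed_ips': fixed,
                   'floating_ips': floating}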
[ 1029.409307] env[65503]: WARNING neutronclient.v2_0.client [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1029.409878] env[65503]: WARNING openstack [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1029.410286] env[65503]: WARNING openstack [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1029.424987] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.425177] env[65503]: DEBUG nova.objects.instance [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'migration_context' on Instance uuid 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.465122] env[65503]: DEBUG nova.policy [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b617c5c7508403f9bef0d6b436f541d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be67f50c5bc447309d4c04f3f2805455', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1029.623592] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450538, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58426} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.623948] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] bee97942-afb2-465f-9774-56f5aa8becca/bee97942-afb2-465f-9774-56f5aa8becca.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1029.624255] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1029.624597] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b748b387-cea0-4382-9b7f-defd20588cd9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.632943] env[65503]: DEBUG oslo_concurrency.lockutils [req-b6dd05e7-3103-4606-ba7e-2a85c2b11abb req-54be9a85-7c18-4f0a-a4a9-ed19cd7547be service nova] Releasing lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.633435] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1029.633435] env[65503]: value = "task-4450542" [ 1029.633435] env[65503]: _type = "Task" [ 1029.633435] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.642922] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450542, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.779213] env[65503]: DEBUG nova.network.neutron [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Successfully created port: b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1029.854912] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450541, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.923246] env[65503]: DEBUG nova.compute.manager [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1029.932343] env[65503]: DEBUG nova.objects.base [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Object Instance<2effe3e4-ea22-4d9f-8f5c-38ee661611e3> lazy-loaded attributes: info_cache,migration_context {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1029.932343] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1956a09-d3f4-4433-b2ac-63a0a2644af0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.970420] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0712162a-0983-462f-a22d-52218f5b3aef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.978455] env[65503]: DEBUG oslo_vmware.api [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1029.978455] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524ee188-a078-3589-39d8-bcb0b6e6ccc4" [ 1029.978455] env[65503]: _type = "Task" [ 1029.978455] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.995814] env[65503]: DEBUG oslo_vmware.api [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524ee188-a078-3589-39d8-bcb0b6e6ccc4, 'name': SearchDatastore_Task, 'duration_secs': 0.009519} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.996144] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.144279] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450542, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.227828} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.146962] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.148209] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e53868f-4f24-43b4-ba4b-925edc9f0d31 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.171299] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] bee97942-afb2-465f-9774-56f5aa8becca/bee97942-afb2-465f-9774-56f5aa8becca.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.174233] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d904e8a-f11f-4933-ad73-41900ee1b60f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.197102] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1030.197102] env[65503]: value = "task-4450543" [ 1030.197102] env[65503]: _type = "Task" [ 1030.197102] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.207917] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450543, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.241913] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed07fb1-11ec-4786-afab-d68f879296c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.250793] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b35c9a-7c14-4d0f-9c5d-eae5192c3dc9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.285522] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf12a50c-1e20-4f74-b317-2378eb3c7c76 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.296977] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35b9bef-a38c-4e51-b7bf-3268648c63a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.313806] env[65503]: DEBUG nova.compute.provider_tree [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.353776] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450541, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.708230] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450543, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.782121] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.782121] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.818198] env[65503]: DEBUG nova.scheduler.client.report [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1030.854607] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450541, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.934788] env[65503]: DEBUG nova.compute.manager [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1030.963223] env[65503]: DEBUG nova.virt.hardware [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1030.963470] env[65503]: DEBUG nova.virt.hardware [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1030.963616] env[65503]: DEBUG nova.virt.hardware [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1030.963791] env[65503]: DEBUG nova.virt.hardware [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1030.963931] env[65503]: DEBUG nova.virt.hardware [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1030.964113] env[65503]: DEBUG nova.virt.hardware [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1030.964330] env[65503]: DEBUG nova.virt.hardware [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1030.964482] env[65503]: DEBUG nova.virt.hardware [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1030.964641] 
env[65503]: DEBUG nova.virt.hardware [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1030.964796] env[65503]: DEBUG nova.virt.hardware [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1030.964960] env[65503]: DEBUG nova.virt.hardware [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1030.965880] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0dbfae-ac7e-4817-8e6d-e9465533a72d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.974926] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7566f8-b955-441f-8be2-b73b30efdbdf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.209452] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450543, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.252881] env[65503]: DEBUG nova.compute.manager [req-0dc8efd2-a015-4381-9e5a-e3ca2acbd263 req-3e94206e-6fe7-49eb-a419-cae475f57559 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Received event network-vif-plugged-b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1031.253202] env[65503]: DEBUG oslo_concurrency.lockutils [req-0dc8efd2-a015-4381-9e5a-e3ca2acbd263 req-3e94206e-6fe7-49eb-a419-cae475f57559 service nova] Acquiring lock "1d1a96cc-63b3-472c-b94a-1ea00763f770-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.253308] env[65503]: DEBUG oslo_concurrency.lockutils [req-0dc8efd2-a015-4381-9e5a-e3ca2acbd263 req-3e94206e-6fe7-49eb-a419-cae475f57559 service nova] Lock "1d1a96cc-63b3-472c-b94a-1ea00763f770-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.253575] env[65503]: DEBUG oslo_concurrency.lockutils [req-0dc8efd2-a015-4381-9e5a-e3ca2acbd263 req-3e94206e-6fe7-49eb-a419-cae475f57559 service nova] Lock "1d1a96cc-63b3-472c-b94a-1ea00763f770-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.253749] env[65503]: DEBUG nova.compute.manager [req-0dc8efd2-a015-4381-9e5a-e3ca2acbd263 req-3e94206e-6fe7-49eb-a419-cae475f57559 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] No waiting events found dispatching network-vif-plugged-b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1031.253912] env[65503]: WARNING nova.compute.manager [req-0dc8efd2-a015-4381-9e5a-e3ca2acbd263 req-3e94206e-6fe7-49eb-a419-cae475f57559 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Received unexpected event network-vif-plugged-b5a654cb-c44f-45fc-bf0c-429ff06916f0 for instance with vm_state building and task_state spawning. 
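[editor's note] The nova.virt.hardware lines above walk through CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits the effective maxima default to 65536, and the only factorisation of 1 vCPU is 1 socket x 1 core x 1 thread, hence "Got 1 possible topologies". The toy sketch below reproduces just that enumeration step; it is simplified and ignores the preferred-topology sorting and NUMA constraints that Nova also applies.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield sockets, cores, threads

    print(list(possible_topologies(1)))   # [(1, 1, 1)], matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"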
[ 1031.290215] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.290215] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.290215] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.293198] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.293198] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.293198] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.293198] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1031.293198] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.324331] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.324932] env[65503]: DEBUG nova.compute.manager [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1031.327714] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.216s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.329602] env[65503]: INFO nova.compute.claims [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1031.340459] env[65503]: DEBUG nova.network.neutron [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Successfully updated port: b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1031.356620] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450541, 'name': CreateVM_Task, 'duration_secs': 2.113545} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.356927] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1031.357634] env[65503]: WARNING neutronclient.v2_0.client [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
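[editor's note] The "Acquiring lock ... by ...", "acquired ... waited 7.216s" and "released ... held 2.433s" lines above (for example around the resource tracker's "compute_resources" lock) are emitted by oslo.concurrency's lockutils. Below is a minimal sketch of the two forms that produce them, the synchronized decorator and the lock context manager; the function bodies and lock names are illustrative only and do not reproduce Nova's resource tracker.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(claims, request):
        # Critical section: only one claim mutates the shared view at a time;
        # entry and exit produce the "acquired ... waited" / "released ... held" DEBUG lines.
        claims.append(request)

    def refresh_cache(instance_uuid):
        # Context-manager form, as in the "Acquiring/Releasing lock refresh_cache-<uuid>" lines above.
        with lockutils.lock('refresh_cache-' + instance_uuid):
            pass  # refresh the instance's network info cache here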
[ 1031.357735] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.357912] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.359351] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1031.359351] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5140941e-8efb-4e18-ad52-ad97b2b208ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.364885] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1031.364885] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b8b97a-772b-8a8e-04b3-a39b093b1de5" [ 1031.364885] env[65503]: _type = "Task" [ 1031.364885] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.375638] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b8b97a-772b-8a8e-04b3-a39b093b1de5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.708613] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450543, 'name': ReconfigVM_Task, 'duration_secs': 1.233598} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.708941] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Reconfigured VM instance instance-0000005c to attach disk [datastore2] bee97942-afb2-465f-9774-56f5aa8becca/bee97942-afb2-465f-9774-56f5aa8becca.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1031.709464] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96e3b1b9-f1ec-4026-b275-217f9fe5b6c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.717314] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1031.717314] env[65503]: value = "task-4450544" [ 1031.717314] env[65503]: _type = "Task" [ 1031.717314] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.726817] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450544, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.794785] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.834658] env[65503]: DEBUG nova.compute.utils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1031.838618] env[65503]: DEBUG nova.compute.manager [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1031.838618] env[65503]: DEBUG nova.network.neutron [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1031.838910] env[65503]: WARNING neutronclient.v2_0.client [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
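The ReconfigVM_Task and Rename_Task entries above all follow one pattern: invoke a vSphere method through the shared session, get back a Task managed object, then poll it until completion (the "Waiting for the task ... progress is 5%" lines). Below is a minimal sketch of that invoke-and-poll pattern using oslo.vmware's public session API; the vCenter address, credentials and the VM reference are placeholders, and the real driver wraps this in its own helpers.

    from oslo_vmware import api

    # Placeholder connection details; in Nova these come from the [vmware] config group.
    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    def rename_vm(vm_ref, new_name):
        # invoke_api() issues the SOAP call and returns a Task moref;
        # wait_for_task() polls it (the "progress is N%" log lines) and
        # raises if the task finishes in an error state.
        task = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                                  newName=new_name)
        return session.wait_for_task(task)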
[ 1031.839141] env[65503]: WARNING neutronclient.v2_0.client [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1031.839886] env[65503]: WARNING openstack [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1031.840089] env[65503]: WARNING openstack [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1031.848104] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.848365] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.848448] env[65503]: DEBUG nova.network.neutron [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1031.877087] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b8b97a-772b-8a8e-04b3-a39b093b1de5, 'name': SearchDatastore_Task, 'duration_secs': 0.012369} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.877485] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.877727] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1031.878058] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.878134] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.878290] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1031.878568] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18e36d5e-4fc1-407d-96a1-f710da158f9f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.890811] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1031.891013] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1031.891870] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26811e54-6668-4cfd-858c-de3c1ddfbebe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.897710] env[65503]: DEBUG nova.policy [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e97b5208de384c19bbc0e332b67fc4ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c5b0c3771b5434992cd58e1af539bde', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1031.903765] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1031.903765] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ee0f5-c81b-cb89-1b72-bb9203be7a6b" [ 1031.903765] env[65503]: _type = "Task" [ 1031.903765] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.912827] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ee0f5-c81b-cb89-1b72-bb9203be7a6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.214758] env[65503]: DEBUG nova.network.neutron [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Successfully created port: 48c4b867-0208-4634-9669-18a33e2018c7 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1032.231597] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450544, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.339475] env[65503]: DEBUG nova.compute.manager [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1032.352194] env[65503]: WARNING openstack [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1032.352194] env[65503]: WARNING openstack [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1032.400541] env[65503]: DEBUG nova.network.neutron [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1032.421067] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ee0f5-c81b-cb89-1b72-bb9203be7a6b, 'name': SearchDatastore_Task, 'duration_secs': 0.009937} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.426095] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbcbe7e3-7de6-4fec-a3c3-51a9cdc9d3d2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.434480] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1032.434480] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b46971-8693-09cb-62a1-9d2e0257ef77" [ 1032.434480] env[65503]: _type = "Task" [ 1032.434480] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.444922] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b46971-8693-09cb-62a1-9d2e0257ef77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.453883] env[65503]: WARNING openstack [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1032.454446] env[65503]: WARNING openstack [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1032.540917] env[65503]: WARNING neutronclient.v2_0.client [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1032.541632] env[65503]: WARNING openstack [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1032.542038] env[65503]: WARNING openstack [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1032.630817] env[65503]: DEBUG nova.network.neutron [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updating instance_info_cache with network_info: [{"id": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "address": "fa:16:3e:2b:7c:50", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a654cb-c4", "ovs_interfaceid": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1032.646712] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0c11ba-0b45-428e-bc91-6e20f59c3948 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.656148] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf741dd1-dbb1-42f9-aeb6-64257227d47d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.690665] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a10ee03-7eef-4689-85c5-7142bbe3615f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.699460] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf6edaf-e5f8-4ede-803f-91efd7f0b310 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.714161] env[65503]: DEBUG nova.compute.provider_tree [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.728109] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450544, 'name': Rename_Task, 'duration_secs': 0.813724} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.729092] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.729374] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2067de5f-e1aa-4e4a-906e-195264bcc4a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.737774] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1032.737774] env[65503]: value = "task-4450545" [ 1032.737774] env[65503]: _type = "Task" [ 1032.737774] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.747105] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450545, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.945382] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b46971-8693-09cb-62a1-9d2e0257ef77, 'name': SearchDatastore_Task, 'duration_secs': 0.011375} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.945680] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.946017] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] d3ca90c9-3dfa-47a5-b48b-67a45ea26021/d3ca90c9-3dfa-47a5-b48b-67a45ea26021.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1032.946238] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e59cbcf-6d28-4f25-a312-19b649ac9d85 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.954983] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1032.954983] env[65503]: value = "task-4450546" [ 1032.954983] env[65503]: _type = "Task" [ 1032.954983] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.963955] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450546, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.135910] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.136378] env[65503]: DEBUG nova.compute.manager [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Instance network_info: |[{"id": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "address": "fa:16:3e:2b:7c:50", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a654cb-c4", "ovs_interfaceid": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1033.136858] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:7c:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5a654cb-c44f-45fc-bf0c-429ff06916f0', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.145080] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1033.145431] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.145718] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4211e963-5474-4509-a24a-45d059b96c3e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.169938] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.169938] env[65503]: value = "task-4450547" [ 1033.169938] env[65503]: _type = "Task" [ 1033.169938] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.182511] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450547, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.218029] env[65503]: DEBUG nova.scheduler.client.report [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1033.256721] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450545, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.297555] env[65503]: DEBUG nova.compute.manager [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Received event network-changed-b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1033.298251] env[65503]: DEBUG nova.compute.manager [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing instance network info cache due to event network-changed-b5a654cb-c44f-45fc-bf0c-429ff06916f0. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1033.298361] env[65503]: DEBUG oslo_concurrency.lockutils [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] Acquiring lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.298535] env[65503]: DEBUG oslo_concurrency.lockutils [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] Acquired lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.298784] env[65503]: DEBUG nova.network.neutron [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing network info cache for port b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1033.350860] env[65503]: DEBUG nova.compute.manager [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1033.395776] env[65503]: DEBUG nova.virt.hardware [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1033.396257] env[65503]: DEBUG nova.virt.hardware [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1033.396505] env[65503]: DEBUG nova.virt.hardware [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1033.397126] env[65503]: DEBUG nova.virt.hardware [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1033.397362] env[65503]: DEBUG nova.virt.hardware [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 
tempest-ImagesTestJSON-2086755152-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1033.397505] env[65503]: DEBUG nova.virt.hardware [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1033.397721] env[65503]: DEBUG nova.virt.hardware [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1033.397866] env[65503]: DEBUG nova.virt.hardware [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1033.398059] env[65503]: DEBUG nova.virt.hardware [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1033.398261] env[65503]: DEBUG nova.virt.hardware [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1033.398643] env[65503]: DEBUG nova.virt.hardware [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1033.399683] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903bca11-d28d-47ed-bf24-52ab19780ab1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.413879] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f86614c-cc44-4a11-8c54-aa7dde179993 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.466951] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450546, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481803} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.467427] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] d3ca90c9-3dfa-47a5-b48b-67a45ea26021/d3ca90c9-3dfa-47a5-b48b-67a45ea26021.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1033.467730] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1033.468046] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c78979a4-5952-472a-a87e-14a7258a1174 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.479158] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1033.479158] env[65503]: value = "task-4450548" [ 1033.479158] env[65503]: _type = "Task" [ 1033.479158] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.489114] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450548, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.682925] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450547, 'name': CreateVM_Task, 'duration_secs': 0.399685} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.683125] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1033.683623] env[65503]: WARNING neutronclient.v2_0.client [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
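The spawn path above copies the cached image vmdk into the instance directory and then extends it; "Extending root virtual disk to 1048576" is simply the flavor's root_gb expressed in KiB, the unit the vSphere extend-disk call works in (root_gb=1 for the m1.nano flavor dumped later in this window). A quick worked check of that conversion; the helper name is illustrative only.

    def root_disk_capacity_kb(root_gb):
        # Flavor root disk size (GiB) -> KiB, the unit used by the extend call.
        return root_gb * 1024 * 1024

    assert root_disk_capacity_kb(1) == 1048576   # "Extending root virtual disk to 1048576"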
[ 1033.683983] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.684177] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.684497] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1033.684756] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7e8953f-16e1-48c6-8ffd-fac20dce313c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.689985] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1033.689985] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5224070d-f978-7690-b3bb-d13e36f97c76" [ 1033.689985] env[65503]: _type = "Task" [ 1033.689985] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.698400] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5224070d-f978-7690-b3bb-d13e36f97c76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.723466] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.396s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.724122] env[65503]: DEBUG nova.compute.manager [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1033.727051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.550s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.728313] env[65503]: INFO nova.compute.claims [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1033.750485] env[65503]: DEBUG oslo_vmware.api [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450545, 'name': PowerOnVM_Task, 'duration_secs': 0.750143} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.750485] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.750485] env[65503]: INFO nova.compute.manager [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Took 9.77 seconds to spawn the instance on the hypervisor. [ 1033.750485] env[65503]: DEBUG nova.compute.manager [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1033.750699] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0afa66d-9984-4407-b83a-53220e1f338c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.784696] env[65503]: DEBUG nova.network.neutron [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Successfully updated port: 48c4b867-0208-4634-9669-18a33e2018c7 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1033.803208] env[65503]: WARNING neutronclient.v2_0.client [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
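The "Inventory has not changed" report a few entries back lists the provider's raw inventory together with reserved amounts and allocation ratios; the capacity placement schedules against is essentially (total - reserved) * allocation_ratio per resource class. A quick check against the reported numbers (VCPU: 48 at ratio 4.0; MEMORY_MB: 196590 with 512 reserved at ratio 1.0; DISK_GB: 200 at ratio 1.0) — an illustration of the arithmetic, not placement's code.

    def schedulable(total, reserved, allocation_ratio):
        # Effective capacity exposed for a resource class.
        return (total - reserved) * allocation_ratio

    print(schedulable(48, 0, 4.0))        # 192.0 schedulable VCPUs
    print(schedulable(196590, 512, 1.0))  # 196078.0 MiB of schedulable memory
    print(schedulable(200, 0, 1.0))       # 200.0 GiB of schedulable disk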
[ 1033.803891] env[65503]: WARNING openstack [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1033.804346] env[65503]: WARNING openstack [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1033.903330] env[65503]: WARNING openstack [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1033.903699] env[65503]: WARNING openstack [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1033.971754] env[65503]: WARNING neutronclient.v2_0.client [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1033.972569] env[65503]: WARNING openstack [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1033.972909] env[65503]: WARNING openstack [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1033.989856] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450548, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075471} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.990190] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1033.991012] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b3cc8a-3c01-4e90-a7f6-018e98418484 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.014268] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] d3ca90c9-3dfa-47a5-b48b-67a45ea26021/d3ca90c9-3dfa-47a5-b48b-67a45ea26021.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1034.014609] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a213b4f-da24-40cc-a58a-8756d2021cb2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.041178] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1034.041178] env[65503]: value = "task-4450549" [ 1034.041178] env[65503]: _type = "Task" [ 1034.041178] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.051965] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450549, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.077135] env[65503]: DEBUG nova.network.neutron [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updated VIF entry in instance network info cache for port b5a654cb-c44f-45fc-bf0c-429ff06916f0. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1034.077606] env[65503]: DEBUG nova.network.neutron [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updating instance_info_cache with network_info: [{"id": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "address": "fa:16:3e:2b:7c:50", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a654cb-c4", "ovs_interfaceid": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1034.201702] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5224070d-f978-7690-b3bb-d13e36f97c76, 'name': SearchDatastore_Task, 'duration_secs': 0.028194} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.202119] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.202367] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.202603] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.202741] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.202915] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.203226] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76764f7a-d66b-41c5-9ecb-4fc76d3e82de {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.219103] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.219336] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.220577] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c10867ad-734f-4fd3-add1-bdac957c40ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.227752] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1034.227752] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52842b44-d5e3-9a97-1fed-b94c738c53f9" [ 1034.227752] env[65503]: _type = "Task" [ 1034.227752] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.232791] env[65503]: DEBUG nova.compute.utils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1034.234370] env[65503]: DEBUG nova.compute.manager [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1034.234577] env[65503]: DEBUG nova.network.neutron [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1034.234899] env[65503]: WARNING neutronclient.v2_0.client [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1034.235957] env[65503]: WARNING neutronclient.v2_0.client [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
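The repeated "Waiting for the task ... progress is N% ... completed successfully" sequences above (SearchDatastore_Task, ReconfigVM_Task, and later CopyVirtualDisk_Task / ExtendVirtualDisk_Task) all follow oslo.vmware's two-step pattern: invoke_api() issues the SOAP call and returns a Task managed-object reference, then wait_for_task() polls it. A minimal sketch of that pattern, assuming a reachable vCenter and an already-known datacenter moref; the hostname, credentials, VMDK path and the 'datacenter-2' value are placeholders, not taken from this log:

    from oslo_vmware import api, vim_util

    # Constructing the session logs in immediately (the SessionManager.Login
    # call seen earlier in this log).
    session = api.VMwareAPISession(
        host='vc.example.test',        # placeholder vCenter host
        server_username='user',
        server_password='secret',
        api_retry_count=10,
        task_poll_interval=0.5)

    # Placeholder datacenter managed-object reference.
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')

    # invoke_api() produces the "Invoking VirtualDiskManager.ExtendVirtualDisk_Task
    # with opID=..." record and returns a Task moref; wait_for_task() then polls
    # TaskInfo every task_poll_interval seconds, emitting the "progress is N%"
    # DEBUG lines, and returns the final TaskInfo on success (or raises on error).
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        name='[datastore2] example/example.vmdk',   # placeholder VMDK path
        datacenter=dc_ref,
        newCapacityKb=1048576,
        eagerZero=False)
    task_info = session.wait_for_task(task)
    print(task_info.state)   # 'success'
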
[ 1034.235957] env[65503]: WARNING openstack [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1034.236229] env[65503]: WARNING openstack [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1034.249235] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52842b44-d5e3-9a97-1fed-b94c738c53f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.272382] env[65503]: INFO nova.compute.manager [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Took 15.46 seconds to build instance. [ 1034.287743] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "refresh_cache-f5319f15-16eb-468a-a70e-7226963ed219" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.287975] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "refresh_cache-f5319f15-16eb-468a-a70e-7226963ed219" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.288225] env[65503]: DEBUG nova.network.neutron [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1034.293543] env[65503]: DEBUG nova.policy [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9afe8731bc284b6881835aa0a8f6c725', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0dbf0e9b08b741e88f971ec5f54dede8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1034.298048] env[65503]: WARNING openstack [None 
req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1034.298307] env[65503]: WARNING openstack [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1034.360010] env[65503]: DEBUG nova.network.neutron [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1034.381299] env[65503]: WARNING openstack [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1034.381717] env[65503]: WARNING openstack [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1034.455329] env[65503]: WARNING neutronclient.v2_0.client [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
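The "Disabling service 'block-storage'/'key-manager'" warnings that recur throughout this log are produced when openstacksdk reads the keystoneauth adapter option valid_interfaces from conf groups where it was never registered, which oslo.config reports as NoSuchOptError. A minimal sketch of that failure mode using stock oslo.config; the group and option names mirror the warnings, everything else is illustrative:

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))
    conf(args=[])   # parse nothing; note no 'valid_interfaces' opt is registered

    try:
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        # -> "no such option valid_interfaces in group [cinder]",
        # the exact text embedded in the warnings above
        print(exc)
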
[ 1034.456361] env[65503]: WARNING openstack [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1034.456916] env[65503]: WARNING openstack [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1034.553238] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450549, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.580589] env[65503]: DEBUG oslo_concurrency.lockutils [req-07169fbf-7dae-457d-8e05-e944d794e7d5 req-3e69e232-4a6e-406e-8d5c-2fb9b4b77143 service nova] Releasing lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.598493] env[65503]: DEBUG nova.network.neutron [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Updating instance_info_cache with network_info: [{"id": "48c4b867-0208-4634-9669-18a33e2018c7", "address": "fa:16:3e:3d:73:c9", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48c4b867-02", "ovs_interfaceid": "48c4b867-0208-4634-9669-18a33e2018c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1034.638023] env[65503]: DEBUG nova.network.neutron [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Successfully created port: 8eff3104-0a50-4cce-b7a9-35ca32ee32fd {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 
1034.739437] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52842b44-d5e3-9a97-1fed-b94c738c53f9, 'name': SearchDatastore_Task, 'duration_secs': 0.021555} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.740754] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e10a8e4-7ef4-4be5-a80b-29d6dd3019e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.747307] env[65503]: DEBUG nova.compute.manager [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1034.764185] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1034.764185] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5242d9d3-1efd-c9ce-5dbe-cf3a74489c7e" [ 1034.764185] env[65503]: _type = "Task" [ 1034.764185] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.772808] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5242d9d3-1efd-c9ce-5dbe-cf3a74489c7e, 'name': SearchDatastore_Task, 'duration_secs': 0.018146} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.773125] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.773398] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 1d1a96cc-63b3-472c-b94a-1ea00763f770/1d1a96cc-63b3-472c-b94a-1ea00763f770.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1034.773677] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2d55e93-cd37-4c6a-b57d-b159a0723f14 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.776887] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8968d797-5e8a-40ef-b568-e465a778c0f3 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "bee97942-afb2-465f-9774-56f5aa8becca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.985s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.783244] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1034.783244] env[65503]: value = "task-4450550" [ 1034.783244] env[65503]: _type = "Task" [ 1034.783244] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.793435] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450550, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.055233] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450549, 'name': ReconfigVM_Task, 'duration_secs': 0.807118} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.055668] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Reconfigured VM instance instance-0000005d to attach disk [datastore2] d3ca90c9-3dfa-47a5-b48b-67a45ea26021/d3ca90c9-3dfa-47a5-b48b-67a45ea26021.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1035.056252] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fe7f1dad-6a87-4aac-b26b-3837142d71a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.061042] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718e175f-57af-4c1e-996c-822a5c161718 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.066451] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1035.066451] env[65503]: value = "task-4450551" [ 1035.066451] env[65503]: _type = "Task" [ 1035.066451] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.073812] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a65ffed-ce25-454d-9c74-84756fa7dbb4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.081320] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450551, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.113637] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "refresh_cache-f5319f15-16eb-468a-a70e-7226963ed219" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.114087] env[65503]: DEBUG nova.compute.manager [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Instance network_info: |[{"id": "48c4b867-0208-4634-9669-18a33e2018c7", "address": "fa:16:3e:3d:73:c9", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48c4b867-02", "ovs_interfaceid": "48c4b867-0208-4634-9669-18a33e2018c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1035.114852] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:73:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '48c4b867-0208-4634-9669-18a33e2018c7', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1035.122621] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1035.123339] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe70e498-f631-4af9-9afb-5509d704df44 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.126121] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1035.126383] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-058946a1-775d-42f9-bff4-bb6d7c9103d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.148815] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd975f3c-87a4-444f-b685-aff86ddbbf93 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.153444] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1035.153444] env[65503]: value = "task-4450552" [ 1035.153444] env[65503]: _type = "Task" [ 1035.153444] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.166000] env[65503]: DEBUG nova.compute.provider_tree [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.174178] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450552, 'name': CreateVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.298538] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450550, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.341980] env[65503]: DEBUG nova.compute.manager [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Received event network-vif-plugged-48c4b867-0208-4634-9669-18a33e2018c7 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1035.342218] env[65503]: DEBUG oslo_concurrency.lockutils [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Acquiring lock "f5319f15-16eb-468a-a70e-7226963ed219-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.342857] env[65503]: DEBUG oslo_concurrency.lockutils [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Lock "f5319f15-16eb-468a-a70e-7226963ed219-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.343059] env[65503]: DEBUG oslo_concurrency.lockutils [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Lock "f5319f15-16eb-468a-a70e-7226963ed219-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.343365] env[65503]: DEBUG nova.compute.manager [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] [instance: f5319f15-16eb-468a-a70e-7226963ed219] No waiting events found dispatching network-vif-plugged-48c4b867-0208-4634-9669-18a33e2018c7 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1035.343507] env[65503]: WARNING nova.compute.manager [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Received unexpected event network-vif-plugged-48c4b867-0208-4634-9669-18a33e2018c7 for instance with vm_state building and task_state spawning. [ 1035.343686] env[65503]: DEBUG nova.compute.manager [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Received event network-changed-48c4b867-0208-4634-9669-18a33e2018c7 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1035.344137] env[65503]: DEBUG nova.compute.manager [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Refreshing instance network info cache due to event network-changed-48c4b867-0208-4634-9669-18a33e2018c7. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1035.344137] env[65503]: DEBUG oslo_concurrency.lockutils [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Acquiring lock "refresh_cache-f5319f15-16eb-468a-a70e-7226963ed219" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.344352] env[65503]: DEBUG oslo_concurrency.lockutils [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Acquired lock "refresh_cache-f5319f15-16eb-468a-a70e-7226963ed219" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.344537] env[65503]: DEBUG nova.network.neutron [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Refreshing network info cache for port 48c4b867-0208-4634-9669-18a33e2018c7 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1035.578145] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450551, 'name': Rename_Task, 'duration_secs': 0.212397} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.578444] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1035.578716] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5117857c-c012-462d-904c-434da67e204b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.587182] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1035.587182] env[65503]: value = "task-4450553" [ 1035.587182] env[65503]: _type = "Task" [ 1035.587182] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.597764] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450553, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.665362] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450552, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.670118] env[65503]: DEBUG nova.scheduler.client.report [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1035.767557] env[65503]: DEBUG nova.compute.manager [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1035.795932] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450550, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.821575} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.796239] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 1d1a96cc-63b3-472c-b94a-1ea00763f770/1d1a96cc-63b3-472c-b94a-1ea00763f770.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1035.796654] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.796767] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-feb7e653-05d1-48de-9e01-118dfc291040 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.802120] env[65503]: DEBUG nova.virt.hardware [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1035.802450] env[65503]: DEBUG nova.virt.hardware [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1035.802546] env[65503]: DEBUG nova.virt.hardware [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1035.802684] env[65503]: DEBUG nova.virt.hardware [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1035.802818] env[65503]: DEBUG nova.virt.hardware [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1035.803627] env[65503]: DEBUG nova.virt.hardware [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1035.803627] env[65503]: DEBUG nova.virt.hardware [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1035.803627] env[65503]: DEBUG nova.virt.hardware [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1035.803627] env[65503]: DEBUG nova.virt.hardware [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1035.803627] env[65503]: DEBUG nova.virt.hardware [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1035.803860] env[65503]: DEBUG 
nova.virt.hardware [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1035.805810] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779ff8d8-a3f6-4766-9bdc-8cde5581e3ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.812163] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1035.812163] env[65503]: value = "task-4450554" [ 1035.812163] env[65503]: _type = "Task" [ 1035.812163] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.822905] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d8f5f1-377f-4ee3-9dec-25e40f3815e7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.834254] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450554, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.852565] env[65503]: WARNING neutronclient.v2_0.client [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1035.853529] env[65503]: WARNING openstack [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1035.854099] env[65503]: WARNING openstack [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1036.100436] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450553, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.110817] env[65503]: WARNING openstack [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1036.112154] env[65503]: WARNING openstack [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1036.175607] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.449s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.176368] env[65503]: DEBUG nova.compute.manager [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1036.179767] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450552, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.180152] env[65503]: DEBUG oslo_concurrency.lockutils [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.875s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.180388] env[65503]: DEBUG nova.objects.instance [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lazy-loading 'resources' on Instance uuid 310ee0b5-07ee-4cf0-b262-5e8b473efa3d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.252456] env[65503]: WARNING neutronclient.v2_0.client [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
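The lock bookkeeping lines around this point (Lock "compute_resources" "released" ... held 2.449s, Lock "compute_resources" acquired ... waited 11.875s, and the Acquiring/Acquired/Releasing triplets for the refresh_cache-* and devstack-image-cache_base locks) are emitted by oslo.concurrency. A minimal sketch of the two forms that produce them; the lock names echo the log, while the functions are illustrative stand-ins for Nova's resource tracker and cache-refresh code:

    from oslo_concurrency import lockutils

    # Decorator form: logs 'Acquiring/acquired/"released" by "<function>"'
    # together with the waited/held timings seen for "compute_resources".
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # critical section; concurrent callers block and accumulate 'waited' time

    # Context-manager form: logs the plain Acquiring/Acquired/Releasing lines
    # (lockutils.py:313/316/334) seen for the refresh_cache-* and image-cache locks.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache here

    update_usage()
    refresh_cache('00000000-0000-0000-0000-000000000000')
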
[ 1036.254040] env[65503]: WARNING openstack [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1036.257231] env[65503]: WARNING openstack [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1036.274710] env[65503]: DEBUG nova.compute.manager [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Stashing vm_state: active {{(pid=65503) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1036.285366] env[65503]: DEBUG nova.network.neutron [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Successfully updated port: 8eff3104-0a50-4cce-b7a9-35ca32ee32fd {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1036.326028] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450554, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104318} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.330031] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1036.330031] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29b9896-c486-496d-b43b-e2d6bf08bf95 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.358266] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 1d1a96cc-63b3-472c-b94a-1ea00763f770/1d1a96cc-63b3-472c-b94a-1ea00763f770.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.359483] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20be7905-e5a2-4809-8b93-eafb1c8f3493 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.388797] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1036.388797] env[65503]: value = "task-4450555" [ 1036.388797] env[65503]: _type = "Task" [ 1036.388797] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.403304] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450555, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.431161] env[65503]: DEBUG nova.network.neutron [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Updated VIF entry in instance network info cache for port 48c4b867-0208-4634-9669-18a33e2018c7. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1036.431161] env[65503]: DEBUG nova.network.neutron [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Updating instance_info_cache with network_info: [{"id": "48c4b867-0208-4634-9669-18a33e2018c7", "address": "fa:16:3e:3d:73:c9", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48c4b867-02", "ovs_interfaceid": "48c4b867-0208-4634-9669-18a33e2018c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1036.604221] env[65503]: DEBUG oslo_vmware.api [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450553, 'name': PowerOnVM_Task, 'duration_secs': 0.819636} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.604559] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1036.604772] env[65503]: INFO nova.compute.manager [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Took 10.11 seconds to spawn the instance on the hypervisor. [ 1036.605053] env[65503]: DEBUG nova.compute.manager [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1036.605859] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2558e53-c736-4c2f-9a9e-0880f3146b24 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.666599] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450552, 'name': CreateVM_Task, 'duration_secs': 1.488196} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.666817] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1036.667298] env[65503]: WARNING neutronclient.v2_0.client [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1036.667669] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.667821] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.668150] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1036.668723] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b00099f-acff-48e0-a1e3-32c35e2a7a61 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.674589] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1036.674589] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d6b7ba-5e9e-e700-34aa-4a9bf4a840b9" [ 1036.674589] env[65503]: _type = "Task" [ 1036.674589] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.681117] env[65503]: DEBUG nova.compute.utils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1036.686085] env[65503]: DEBUG nova.compute.manager [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1036.686288] env[65503]: DEBUG nova.network.neutron [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1036.686616] env[65503]: WARNING neutronclient.v2_0.client [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1036.686923] env[65503]: WARNING neutronclient.v2_0.client [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1036.687605] env[65503]: WARNING openstack [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1036.687967] env[65503]: WARNING openstack [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1036.696920] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d6b7ba-5e9e-e700-34aa-4a9bf4a840b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.740695] env[65503]: DEBUG nova.policy [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa5203efa0354baca5354d76cf3365c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf54348a3d0948cfa816cc3746e86806', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1036.789837] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.790143] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquired lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.790143] env[65503]: DEBUG nova.network.neutron [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1036.792120] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.899855] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450555, 'name': ReconfigVM_Task, 'duration_secs': 0.47955} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.899855] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 1d1a96cc-63b3-472c-b94a-1ea00763f770/1d1a96cc-63b3-472c-b94a-1ea00763f770.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.903348] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1c86823-2779-4684-a024-0483cb91cea5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.912147] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1036.912147] env[65503]: value = "task-4450556" [ 1036.912147] env[65503]: _type = "Task" [ 1036.912147] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.922239] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450556, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.932920] env[65503]: DEBUG oslo_concurrency.lockutils [req-0ea5da88-2aaa-44f5-910c-67be9779c0fa req-6114ab37-d33b-4948-b0fa-b758026060b5 service nova] Releasing lock "refresh_cache-f5319f15-16eb-468a-a70e-7226963ed219" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.966488] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc2211e-965c-42bb-bae0-4ceac89a4fe9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.978957] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75df940-e70e-4e76-abcb-2c89e30813ee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.014213] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf8f43c-0e89-4d24-b06f-828ece502412 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.023685] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad63c3b-c9ba-4a39-87b9-012d472c8840 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.045598] env[65503]: DEBUG nova.compute.provider_tree [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1037.087346] env[65503]: DEBUG 
nova.network.neutron [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Successfully created port: 17d55d47-c1ec-47a1-b233-b7ea23837bb5 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1037.124731] env[65503]: INFO nova.compute.manager [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Took 17.12 seconds to build instance. [ 1037.186343] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d6b7ba-5e9e-e700-34aa-4a9bf4a840b9, 'name': SearchDatastore_Task, 'duration_secs': 0.020584} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.186578] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.186814] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1037.187064] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.187225] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.187594] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1037.187693] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26f7976c-6325-48e2-8dcf-bb175cd8da6e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.201293] env[65503]: DEBUG nova.compute.manager [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: 
bcc91c22-5f92-4233-a293-54f2790a750b] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1037.202589] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1037.203105] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1037.205863] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bfe7303-c00c-438b-bcfa-ce5291a7e772 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.212376] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1037.212376] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52412748-406d-8c93-72c5-764f4ae49cd1" [ 1037.212376] env[65503]: _type = "Task" [ 1037.212376] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.222548] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52412748-406d-8c93-72c5-764f4ae49cd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.295523] env[65503]: WARNING openstack [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1037.295523] env[65503]: WARNING openstack [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1037.347846] env[65503]: DEBUG nova.network.neutron [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1037.376046] env[65503]: DEBUG nova.compute.manager [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Received event network-vif-plugged-8eff3104-0a50-4cce-b7a9-35ca32ee32fd {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1037.376262] env[65503]: DEBUG oslo_concurrency.lockutils [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Acquiring lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.376505] env[65503]: DEBUG oslo_concurrency.lockutils [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.376702] env[65503]: DEBUG oslo_concurrency.lockutils [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.376926] env[65503]: DEBUG nova.compute.manager [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] No waiting events found dispatching network-vif-plugged-8eff3104-0a50-4cce-b7a9-35ca32ee32fd {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1037.376926] env[65503]: WARNING nova.compute.manager [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Received unexpected event network-vif-plugged-8eff3104-0a50-4cce-b7a9-35ca32ee32fd for instance with vm_state building and task_state spawning. [ 1037.377176] env[65503]: DEBUG nova.compute.manager [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Received event network-changed-8eff3104-0a50-4cce-b7a9-35ca32ee32fd {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1037.377244] env[65503]: DEBUG nova.compute.manager [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Refreshing instance network info cache due to event network-changed-8eff3104-0a50-4cce-b7a9-35ca32ee32fd. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1037.377380] env[65503]: DEBUG oslo_concurrency.lockutils [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Acquiring lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.382714] env[65503]: WARNING openstack [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1037.383109] env[65503]: WARNING openstack [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1037.428614] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450556, 'name': Rename_Task, 'duration_secs': 0.156823} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.428966] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.429235] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4168fd1d-8d1e-4d23-8b65-8d50a93d744b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.437064] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1037.437064] env[65503]: value = "task-4450557" [ 1037.437064] env[65503]: _type = "Task" [ 1037.437064] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.446069] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450557, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.471485] env[65503]: WARNING neutronclient.v2_0.client [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1037.472267] env[65503]: WARNING openstack [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1037.472645] env[65503]: WARNING openstack [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1037.548168] env[65503]: DEBUG nova.scheduler.client.report [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1037.600415] env[65503]: DEBUG nova.network.neutron [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Updating instance_info_cache with network_info: [{"id": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "address": "fa:16:3e:9e:b7:dc", "network": {"id": "e65ab27f-25ec-415d-b0f1-4cee1c4a6a2e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1962350376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dbf0e9b08b741e88f971ec5f54dede8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eff3104-0a", "ovs_interfaceid": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1037.626961] env[65503]: DEBUG oslo_concurrency.lockutils [None req-364d8c71-e82d-403f-b8a7-0987fec388cf tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.640s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.724325] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52412748-406d-8c93-72c5-764f4ae49cd1, 'name': SearchDatastore_Task, 'duration_secs': 0.027809} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.725222] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eadd3e73-7b3c-4496-9710-1123d10f51c1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.732994] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1037.732994] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f0f7f6-0b70-36c0-03c8-b18d8caed734" [ 1037.732994] env[65503]: _type = "Task" [ 1037.732994] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.741484] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f0f7f6-0b70-36c0-03c8-b18d8caed734, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.947883] env[65503]: DEBUG oslo_vmware.api [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450557, 'name': PowerOnVM_Task, 'duration_secs': 0.501343} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.948243] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1037.948459] env[65503]: INFO nova.compute.manager [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Took 7.01 seconds to spawn the instance on the hypervisor. 
[ 1037.948632] env[65503]: DEBUG nova.compute.manager [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1037.949635] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d260f96-9e8d-4e35-8fc5-d0b192497b63 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.055359] env[65503]: DEBUG oslo_concurrency.lockutils [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.875s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.059179] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.047s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.061267] env[65503]: INFO nova.compute.claims [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1038.082462] env[65503]: INFO nova.scheduler.client.report [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleted allocations for instance 310ee0b5-07ee-4cf0-b262-5e8b473efa3d [ 1038.103818] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Releasing lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.104447] env[65503]: DEBUG nova.compute.manager [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Instance network_info: |[{"id": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "address": "fa:16:3e:9e:b7:dc", "network": {"id": "e65ab27f-25ec-415d-b0f1-4cee1c4a6a2e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1962350376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dbf0e9b08b741e88f971ec5f54dede8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", 
"external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eff3104-0a", "ovs_interfaceid": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1038.105388] env[65503]: DEBUG oslo_concurrency.lockutils [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Acquired lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.105388] env[65503]: DEBUG nova.network.neutron [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Refreshing network info cache for port 8eff3104-0a50-4cce-b7a9-35ca32ee32fd {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1038.107037] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:b7:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0e00b2f1-c70f-4b21-86eb-810643cc1680', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8eff3104-0a50-4cce-b7a9-35ca32ee32fd', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1038.114941] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1038.116980] env[65503]: WARNING neutronclient.v2_0.client [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1038.117688] env[65503]: WARNING openstack [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1038.118162] env[65503]: WARNING openstack [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1038.125457] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1038.126416] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6afb2596-6b01-46b7-9b8e-f6a37a910c59 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.149615] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1038.149615] env[65503]: value = "task-4450558" [ 1038.149615] env[65503]: _type = "Task" [ 1038.149615] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.159223] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450558, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.214314] env[65503]: DEBUG nova.compute.manager [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1038.249088] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f0f7f6-0b70-36c0-03c8-b18d8caed734, 'name': SearchDatastore_Task, 'duration_secs': 0.011157} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.252967] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.254668] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f5319f15-16eb-468a-a70e-7226963ed219/f5319f15-16eb-468a-a70e-7226963ed219.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1038.254668] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a69f49e6-1863-428f-a583-1c61a366af11 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.261062] env[65503]: DEBUG nova.virt.hardware [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1038.261323] env[65503]: DEBUG nova.virt.hardware [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1038.261517] env[65503]: DEBUG nova.virt.hardware [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1038.261707] env[65503]: DEBUG nova.virt.hardware [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1038.261844] env[65503]: DEBUG nova.virt.hardware [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Image pref 0:0:0 
{{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1038.263044] env[65503]: DEBUG nova.virt.hardware [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1038.263500] env[65503]: DEBUG nova.virt.hardware [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1038.263836] env[65503]: DEBUG nova.virt.hardware [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1038.264208] env[65503]: DEBUG nova.virt.hardware [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1038.264556] env[65503]: DEBUG nova.virt.hardware [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1038.264913] env[65503]: DEBUG nova.virt.hardware [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1038.266279] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4004c38a-4bed-4d71-b57a-9d88509b80cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.272572] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1038.272572] env[65503]: value = "task-4450559" [ 1038.272572] env[65503]: _type = "Task" [ 1038.272572] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.283680] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6266ff7-abee-46f8-9ef7-81385c370e6f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.300339] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450559, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.471165] env[65503]: INFO nova.compute.manager [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Took 17.43 seconds to build instance. [ 1038.591854] env[65503]: DEBUG oslo_concurrency.lockutils [None req-48bde551-bf3e-4f98-a235-9f9b09586b51 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "310ee0b5-07ee-4cf0-b262-5e8b473efa3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.316s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.631894] env[65503]: WARNING openstack [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1038.632374] env[65503]: WARNING openstack [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1038.661865] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450558, 'name': CreateVM_Task, 'duration_secs': 0.385651} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.662134] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1038.662614] env[65503]: WARNING neutronclient.v2_0.client [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1038.662995] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.663168] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.663632] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1038.663771] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a50a2164-d5c1-4144-a569-45a81c79f9b0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.672850] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1038.672850] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5274c8aa-75a2-5197-46b0-57335d04683f" [ 1038.672850] env[65503]: _type = "Task" [ 1038.672850] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.688236] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5274c8aa-75a2-5197-46b0-57335d04683f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.696479] env[65503]: DEBUG nova.network.neutron [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Successfully updated port: 17d55d47-c1ec-47a1-b233-b7ea23837bb5 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1038.703331] env[65503]: DEBUG nova.compute.manager [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Received event network-changed-40a9564e-b61f-47ad-9d1b-9494f3514527 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1038.703878] env[65503]: DEBUG nova.compute.manager [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Refreshing instance network info cache due to event network-changed-40a9564e-b61f-47ad-9d1b-9494f3514527. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1038.705041] env[65503]: DEBUG oslo_concurrency.lockutils [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] Acquiring lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.705041] env[65503]: DEBUG oslo_concurrency.lockutils [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] Acquired lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.705041] env[65503]: DEBUG nova.network.neutron [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Refreshing network info cache for port 40a9564e-b61f-47ad-9d1b-9494f3514527 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1038.753456] env[65503]: WARNING neutronclient.v2_0.client [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1038.754174] env[65503]: WARNING openstack [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1038.754559] env[65503]: WARNING openstack [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1038.784729] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450559, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517853} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.785049] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f5319f15-16eb-468a-a70e-7226963ed219/f5319f15-16eb-468a-a70e-7226963ed219.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1038.785352] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1038.785740] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7898167c-69cd-4215-9206-5c5632b98359 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.792869] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1038.792869] env[65503]: value = "task-4450560" [ 1038.792869] env[65503]: _type = "Task" [ 1038.792869] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.806758] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450560, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.856710] env[65503]: DEBUG nova.network.neutron [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Updated VIF entry in instance network info cache for port 8eff3104-0a50-4cce-b7a9-35ca32ee32fd. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1038.857176] env[65503]: DEBUG nova.network.neutron [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Updating instance_info_cache with network_info: [{"id": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "address": "fa:16:3e:9e:b7:dc", "network": {"id": "e65ab27f-25ec-415d-b0f1-4cee1c4a6a2e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1962350376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dbf0e9b08b741e88f971ec5f54dede8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eff3104-0a", "ovs_interfaceid": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1038.978756] env[65503]: DEBUG oslo_concurrency.lockutils [None req-762a4138-28f7-4505-b132-ecefa2337595 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.944s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.184536] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5274c8aa-75a2-5197-46b0-57335d04683f, 'name': SearchDatastore_Task, 'duration_secs': 0.038652} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.187658] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.187920] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1039.188173] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.188333] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.188508] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.188994] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e95c4883-086a-4c47-ae75-17a8e32f8fca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.203667] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.203876] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1039.207318] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67f0c419-a716-4d5b-bcfa-308920766187 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.213715] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "refresh_cache-bcc91c22-5f92-4233-a293-54f2790a750b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.213941] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "refresh_cache-bcc91c22-5f92-4233-a293-54f2790a750b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.214159] env[65503]: DEBUG nova.network.neutron [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1039.216796] env[65503]: WARNING neutronclient.v2_0.client [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1039.217519] env[65503]: WARNING openstack [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1039.217896] env[65503]: WARNING openstack [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1039.226680] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1039.226680] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52988928-ed2f-dbb6-02c2-bc618e7c3708" [ 1039.226680] env[65503]: _type = "Task" [ 1039.226680] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.243156] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52988928-ed2f-dbb6-02c2-bc618e7c3708, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.304605] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450560, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063552} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.309857] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1039.313892] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd86e41-4099-4165-8b8c-52a663828984 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.339598] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] f5319f15-16eb-468a-a70e-7226963ed219/f5319f15-16eb-468a-a70e-7226963ed219.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1039.342961] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ceb6501-c25b-46dd-89ed-79785be61c7d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.362348] env[65503]: WARNING openstack [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1039.362751] env[65503]: WARNING openstack [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1039.370357] env[65503]: DEBUG oslo_concurrency.lockutils [req-b7ef8d41-b704-4e53-9dc0-3f4f8d5637d1 req-db519d47-4c22-45a9-ae81-41414ad7040b service nova] Releasing lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.375624] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1039.375624] env[65503]: value = "task-4450561" [ 1039.375624] env[65503]: _type = "Task" [ 1039.375624] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.385918] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450561, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.390731] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f00d7ad-bb5e-49be-86bc-09d4cf3687d3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.402513] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c41f67-8475-46c9-afdb-27a5ba2743f7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.444147] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d848a1-b94c-40a0-9f98-bc57be6fed5c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.447931] env[65503]: DEBUG nova.compute.manager [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Received event network-vif-plugged-17d55d47-c1ec-47a1-b233-b7ea23837bb5 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1039.448175] env[65503]: DEBUG oslo_concurrency.lockutils [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Acquiring lock "bcc91c22-5f92-4233-a293-54f2790a750b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.448379] env[65503]: DEBUG oslo_concurrency.lockutils [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Lock "bcc91c22-5f92-4233-a293-54f2790a750b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.448543] env[65503]: DEBUG oslo_concurrency.lockutils [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Lock "bcc91c22-5f92-4233-a293-54f2790a750b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.448707] env[65503]: DEBUG nova.compute.manager [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] No waiting events found dispatching network-vif-plugged-17d55d47-c1ec-47a1-b233-b7ea23837bb5 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1039.448864] env[65503]: WARNING nova.compute.manager [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Received unexpected event network-vif-plugged-17d55d47-c1ec-47a1-b233-b7ea23837bb5 for instance with vm_state building and 
task_state spawning. [ 1039.449021] env[65503]: DEBUG nova.compute.manager [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Received event network-changed-17d55d47-c1ec-47a1-b233-b7ea23837bb5 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1039.449171] env[65503]: DEBUG nova.compute.manager [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Refreshing instance network info cache due to event network-changed-17d55d47-c1ec-47a1-b233-b7ea23837bb5. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1039.449334] env[65503]: DEBUG oslo_concurrency.lockutils [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Acquiring lock "refresh_cache-bcc91c22-5f92-4233-a293-54f2790a750b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.457495] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab05691-7417-4c8c-8992-6f049ccf08ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.476138] env[65503]: DEBUG nova.compute.provider_tree [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.493457] env[65503]: WARNING neutronclient.v2_0.client [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1039.494185] env[65503]: WARNING openstack [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1039.494568] env[65503]: WARNING openstack [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1039.660217] env[65503]: DEBUG nova.network.neutron [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Updated VIF entry in instance network info cache for port 40a9564e-b61f-47ad-9d1b-9494f3514527. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1039.661887] env[65503]: DEBUG nova.network.neutron [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Updating instance_info_cache with network_info: [{"id": "40a9564e-b61f-47ad-9d1b-9494f3514527", "address": "fa:16:3e:79:2e:8d", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40a9564e-b6", "ovs_interfaceid": "40a9564e-b61f-47ad-9d1b-9494f3514527", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1039.729431] env[65503]: WARNING openstack [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1039.729683] env[65503]: WARNING openstack [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1039.759674] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52988928-ed2f-dbb6-02c2-bc618e7c3708, 'name': SearchDatastore_Task, 'duration_secs': 0.053712} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.759831] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-873e07a6-01e2-4f64-bcb6-f9f59c8f8f3f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.767811] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1039.767811] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5281d554-0224-7860-a51c-083a4513d795" [ 1039.767811] env[65503]: _type = "Task" [ 1039.767811] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.778238] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5281d554-0224-7860-a51c-083a4513d795, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.779412] env[65503]: DEBUG nova.network.neutron [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1039.799925] env[65503]: WARNING openstack [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1039.800486] env[65503]: WARNING openstack [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1039.868683] env[65503]: WARNING neutronclient.v2_0.client [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1039.869377] env[65503]: WARNING openstack [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1039.869753] env[65503]: WARNING openstack [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1039.887911] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450561, 'name': ReconfigVM_Task, 'duration_secs': 0.321261} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.888245] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Reconfigured VM instance instance-0000005f to attach disk [datastore2] f5319f15-16eb-468a-a70e-7226963ed219/f5319f15-16eb-468a-a70e-7226963ed219.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.888936] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2a5651b-6883-4678-ab53-93724bafa30d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.897270] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1039.897270] env[65503]: value = "task-4450562" [ 1039.897270] env[65503]: _type = "Task" [ 1039.897270] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.906934] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450562, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.963496] env[65503]: DEBUG nova.network.neutron [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Updating instance_info_cache with network_info: [{"id": "17d55d47-c1ec-47a1-b233-b7ea23837bb5", "address": "fa:16:3e:19:60:19", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17d55d47-c1", "ovs_interfaceid": "17d55d47-c1ec-47a1-b233-b7ea23837bb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1039.980960] env[65503]: DEBUG nova.scheduler.client.report [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1040.170746] env[65503]: DEBUG oslo_concurrency.lockutils [req-425f480d-fbe7-4688-903b-09f1e672ecf7 req-0215964b-c0e7-4e73-95f2-b9a890949f4d service nova] Releasing lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.280344] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5281d554-0224-7860-a51c-083a4513d795, 'name': SearchDatastore_Task, 'duration_secs': 0.05423} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.280564] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.280858] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] f50dce3d-4ef2-46e3-9044-c53b55ddbefb/f50dce3d-4ef2-46e3-9044-c53b55ddbefb.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1040.281166] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-260ee090-744c-498a-9763-9a56b025ba5d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.290891] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1040.290891] env[65503]: value = "task-4450563" [ 1040.290891] env[65503]: _type = "Task" [ 1040.290891] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.301516] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450563, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.408100] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450562, 'name': Rename_Task, 'duration_secs': 0.147664} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.408100] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1040.408100] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87ef5db7-55a1-4ce0-bf94-62a9352c6984 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.415247] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1040.415247] env[65503]: value = "task-4450564" [ 1040.415247] env[65503]: _type = "Task" [ 1040.415247] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.424182] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450564, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.467121] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "refresh_cache-bcc91c22-5f92-4233-a293-54f2790a750b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.467647] env[65503]: DEBUG nova.compute.manager [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Instance network_info: |[{"id": "17d55d47-c1ec-47a1-b233-b7ea23837bb5", "address": "fa:16:3e:19:60:19", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17d55d47-c1", "ovs_interfaceid": "17d55d47-c1ec-47a1-b233-b7ea23837bb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1040.468065] env[65503]: DEBUG oslo_concurrency.lockutils [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Acquired lock "refresh_cache-bcc91c22-5f92-4233-a293-54f2790a750b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.468284] env[65503]: DEBUG nova.network.neutron [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Refreshing network info cache for port 17d55d47-c1ec-47a1-b233-b7ea23837bb5 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1040.470435] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:60:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '17d55d47-c1ec-47a1-b233-b7ea23837bb5', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1040.480271] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1040.481895] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1040.482286] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c4cac62-5c0c-4d8e-beb9-f81245f5656c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.502177] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.503047] env[65503]: DEBUG nova.compute.manager [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1040.509031] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.309s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.509276] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.511524] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 10.515s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.523223] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1040.523223] env[65503]: value = "task-4450565" [ 1040.523223] env[65503]: _type = "Task" [ 1040.523223] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.539784] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450565, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.542975] env[65503]: INFO nova.scheduler.client.report [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted allocations for instance 82415443-1884-4898-996e-828d23f67f23 [ 1040.625624] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.626040] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.806757] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450563, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.926423] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450564, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.983105] env[65503]: WARNING neutronclient.v2_0.client [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1040.985712] env[65503]: WARNING openstack [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1040.986300] env[65503]: WARNING openstack [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1041.011652] env[65503]: DEBUG nova.compute.utils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1041.013553] env[65503]: DEBUG nova.compute.manager [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1041.023218] env[65503]: DEBUG nova.network.neutron [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1041.023218] env[65503]: WARNING neutronclient.v2_0.client [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1041.023218] env[65503]: WARNING neutronclient.v2_0.client [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1041.023218] env[65503]: WARNING openstack [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1041.023218] env[65503]: WARNING openstack [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1041.056968] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450565, 'name': CreateVM_Task, 'duration_secs': 0.471983} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.057563] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4bf91945-e354-47aa-b06f-c54f74c8c19b tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "82415443-1884-4898-996e-828d23f67f23" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.697s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.061545] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1041.062804] env[65503]: WARNING neutronclient.v2_0.client [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1041.063223] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.063400] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.063680] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1041.063965] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbebce45-efe1-4505-9be1-a363b99c48ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.077785] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1041.077785] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524132d9-9a25-5deb-a2b4-bbf0eedae5ac" [ 1041.077785] env[65503]: _type = "Task" [ 1041.077785] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.091342] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524132d9-9a25-5deb-a2b4-bbf0eedae5ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.135850] env[65503]: DEBUG nova.policy [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e2c1b5740924a1fbccede1d48e95f85', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3ade9ce0dc44449bb7a3bf0c624e366', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1041.138159] env[65503]: DEBUG nova.compute.manager [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1041.170027] env[65503]: WARNING openstack [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1041.170526] env[65503]: WARNING openstack [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1041.304677] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450563, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570636} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.304677] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] f50dce3d-4ef2-46e3-9044-c53b55ddbefb/f50dce3d-4ef2-46e3-9044-c53b55ddbefb.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1041.304677] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1041.305037] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39569294-8732-4201-b158-bfcf4558ffdb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.317373] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1041.317373] env[65503]: value = "task-4450566" [ 1041.317373] env[65503]: _type = "Task" [ 1041.317373] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.331697] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450566, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.374204] env[65503]: WARNING neutronclient.v2_0.client [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1041.375048] env[65503]: WARNING openstack [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1041.375452] env[65503]: WARNING openstack [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1041.385232] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20edd084-abb8-471f-896f-5cac27d3fd25 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.395870] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e68d7f-1f14-42ff-9378-a237783164cc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.430876] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1e973f-9df4-48f2-91a5-74aec4972787 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.444324] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b8f2db-15b3-41f2-8841-f0de85b391cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.447360] env[65503]: DEBUG oslo_vmware.api [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450564, 'name': PowerOnVM_Task, 'duration_secs': 0.695418} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.452325] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1041.452567] env[65503]: INFO nova.compute.manager [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Took 8.10 seconds to spawn the instance on the hypervisor. 
[ 1041.452736] env[65503]: DEBUG nova.compute.manager [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1041.454680] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ccfc62-390b-43ab-ba36-1ac9d690d732 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.471016] env[65503]: DEBUG nova.compute.provider_tree [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.473189] env[65503]: DEBUG nova.compute.manager [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Received event network-changed-b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1041.473401] env[65503]: DEBUG nova.compute.manager [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing instance network info cache due to event network-changed-b5a654cb-c44f-45fc-bf0c-429ff06916f0. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1041.473622] env[65503]: DEBUG oslo_concurrency.lockutils [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] Acquiring lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.473751] env[65503]: DEBUG oslo_concurrency.lockutils [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] Acquired lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.473896] env[65503]: DEBUG nova.network.neutron [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing network info cache for port b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1041.507031] env[65503]: DEBUG nova.network.neutron [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Updated VIF entry in instance network info cache for port 17d55d47-c1ec-47a1-b233-b7ea23837bb5. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1041.507639] env[65503]: DEBUG nova.network.neutron [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Updating instance_info_cache with network_info: [{"id": "17d55d47-c1ec-47a1-b233-b7ea23837bb5", "address": "fa:16:3e:19:60:19", "network": {"id": "1f719e09-5d6d-421c-a54a-62fb33d0688c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1224151961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf54348a3d0948cfa816cc3746e86806", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17d55d47-c1", "ovs_interfaceid": "17d55d47-c1ec-47a1-b233-b7ea23837bb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1041.535549] env[65503]: DEBUG nova.compute.manager [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1041.590712] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524132d9-9a25-5deb-a2b4-bbf0eedae5ac, 'name': SearchDatastore_Task, 'duration_secs': 0.031017} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.591035] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.591276] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1041.591506] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.591643] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.591813] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1041.592100] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93d01457-00e4-402f-b7b4-ef584e057ce5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.608284] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1041.608567] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1041.609976] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a45dbf1a-9f8d-413a-8623-9138cd5e43ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.617472] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1041.617472] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52431ba8-f2b5-496f-2f52-614c37b8ac21" [ 1041.617472] env[65503]: _type = "Task" [ 1041.617472] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.627453] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52431ba8-f2b5-496f-2f52-614c37b8ac21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.628532] env[65503]: DEBUG nova.network.neutron [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Successfully created port: 7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1041.661108] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.830713] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450566, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082042} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.831045] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1041.831908] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c8f85b-e912-49f8-a4f4-56c4acab29c6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.856145] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] f50dce3d-4ef2-46e3-9044-c53b55ddbefb/f50dce3d-4ef2-46e3-9044-c53b55ddbefb.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1041.856501] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd669749-4eeb-45c7-ae48-6ebe1235301e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.877088] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1041.877088] env[65503]: value = "task-4450567" [ 1041.877088] env[65503]: _type = "Task" [ 1041.877088] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.887081] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450567, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.972965] env[65503]: DEBUG nova.scheduler.client.report [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1041.980387] env[65503]: WARNING neutronclient.v2_0.client [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1041.981045] env[65503]: WARNING openstack [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1041.981678] env[65503]: WARNING openstack [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1041.996312] env[65503]: INFO nova.compute.manager [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Took 18.88 seconds to build instance. [ 1042.009900] env[65503]: DEBUG oslo_concurrency.lockutils [req-25558a36-c28e-41e9-ac9d-5983bd742cba req-159fe0a0-7b3a-407f-8623-b4b66926381d service nova] Releasing lock "refresh_cache-bcc91c22-5f92-4233-a293-54f2790a750b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.133337] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52431ba8-f2b5-496f-2f52-614c37b8ac21, 'name': SearchDatastore_Task, 'duration_secs': 0.042637} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.133561] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4157a480-3c0f-4019-9d7a-ddb6c95ca9f1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.141658] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1042.141658] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52928304-2c4f-d38a-adbb-e6452a5da46d" [ 1042.141658] env[65503]: _type = "Task" [ 1042.141658] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.150352] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52928304-2c4f-d38a-adbb-e6452a5da46d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.162251] env[65503]: WARNING openstack [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1042.162773] env[65503]: WARNING openstack [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1042.301650] env[65503]: WARNING neutronclient.v2_0.client [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1042.302423] env[65503]: WARNING openstack [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1042.302771] env[65503]: WARNING openstack [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1042.388264] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450567, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.434307] env[65503]: DEBUG nova.network.neutron [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updated VIF entry in instance network info cache for port b5a654cb-c44f-45fc-bf0c-429ff06916f0. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1042.434848] env[65503]: DEBUG nova.network.neutron [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updating instance_info_cache with network_info: [{"id": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "address": "fa:16:3e:2b:7c:50", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a654cb-c4", "ovs_interfaceid": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1042.498984] env[65503]: DEBUG oslo_concurrency.lockutils [None req-826de49c-b4c0-433d-a210-25e909ccff49 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "f5319f15-16eb-468a-a70e-7226963ed219" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.391s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.549405] env[65503]: DEBUG nova.compute.manager [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1042.580523] env[65503]: DEBUG nova.virt.hardware [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1042.580804] env[65503]: DEBUG nova.virt.hardware [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1042.580983] env[65503]: DEBUG nova.virt.hardware [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1042.581184] env[65503]: DEBUG nova.virt.hardware [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1042.581342] env[65503]: DEBUG nova.virt.hardware [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1042.581495] env[65503]: DEBUG nova.virt.hardware [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1042.581723] env[65503]: DEBUG nova.virt.hardware [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1042.581893] env[65503]: DEBUG nova.virt.hardware [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1042.582068] env[65503]: DEBUG nova.virt.hardware [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1042.582256] env[65503]: DEBUG nova.virt.hardware [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1042.582441] env[65503]: DEBUG nova.virt.hardware [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1042.583472] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52126e2f-24f1-406b-a3ff-d74b191e2d8b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.593908] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0360032c-8bae-48a5-aa38-3d667cf623c2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.654130] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52928304-2c4f-d38a-adbb-e6452a5da46d, 'name': SearchDatastore_Task, 'duration_secs': 0.028542} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.654477] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.654741] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] bcc91c22-5f92-4233-a293-54f2790a750b/bcc91c22-5f92-4233-a293-54f2790a750b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1042.655024] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f60d69a8-9f36-4c88-9223-3ce122f332b0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.663302] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1042.663302] env[65503]: value = "task-4450568" [ 1042.663302] env[65503]: _type = "Task" [ 1042.663302] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.673586] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450568, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.889703] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450567, 'name': ReconfigVM_Task, 'duration_secs': 0.832564} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.890020] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Reconfigured VM instance instance-00000060 to attach disk [datastore1] f50dce3d-4ef2-46e3-9044-c53b55ddbefb/f50dce3d-4ef2-46e3-9044-c53b55ddbefb.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1042.890732] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-704db8a8-28a0-4d55-b270-a1fb52b0f02e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.898514] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1042.898514] env[65503]: value = "task-4450569" [ 1042.898514] env[65503]: _type = "Task" [ 1042.898514] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.908874] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450569, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.937887] env[65503]: DEBUG oslo_concurrency.lockutils [req-1d4b7272-5e04-46b1-82bc-8f0f71a688ec req-404cae34-20f6-4671-a4b1-80653a967074 service nova] Releasing lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.988345] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.476s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.988345] env[65503]: DEBUG nova.compute.manager [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=65503) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5416}} [ 1042.991473] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 11.197s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.992458] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.992458] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1042.992458] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.200s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.994922] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10ed8a5-c0e2-4f7f-bb85-44ebabb9ba45 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.008413] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fec18c-e45d-4ef3-abcd-7323018bb71e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.025645] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d78c7e8-3445-43ae-b23f-2f87a4c25d7b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.035102] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f43699-88e0-4d07-886d-11ca45456e03 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.072261] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180066MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1043.072435] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.083808] env[65503]: DEBUG nova.compute.manager [req-a62d6b47-1200-49a4-b1f9-11dadf9ab210 req-8221f729-2b0a-4102-bd0a-728a9b7e968e service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Received event 
network-vif-plugged-7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1043.084052] env[65503]: DEBUG oslo_concurrency.lockutils [req-a62d6b47-1200-49a4-b1f9-11dadf9ab210 req-8221f729-2b0a-4102-bd0a-728a9b7e968e service nova] Acquiring lock "b7f55645-f152-4fc9-a962-393f9a9b9c55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.084698] env[65503]: DEBUG oslo_concurrency.lockutils [req-a62d6b47-1200-49a4-b1f9-11dadf9ab210 req-8221f729-2b0a-4102-bd0a-728a9b7e968e service nova] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.084927] env[65503]: DEBUG oslo_concurrency.lockutils [req-a62d6b47-1200-49a4-b1f9-11dadf9ab210 req-8221f729-2b0a-4102-bd0a-728a9b7e968e service nova] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.085120] env[65503]: DEBUG nova.compute.manager [req-a62d6b47-1200-49a4-b1f9-11dadf9ab210 req-8221f729-2b0a-4102-bd0a-728a9b7e968e service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] No waiting events found dispatching network-vif-plugged-7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1043.085380] env[65503]: WARNING nova.compute.manager [req-a62d6b47-1200-49a4-b1f9-11dadf9ab210 req-8221f729-2b0a-4102-bd0a-728a9b7e968e service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Received unexpected event network-vif-plugged-7467737c-bad8-423a-85d9-f5870d27aebc for instance with vm_state building and task_state spawning. [ 1043.174230] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450568, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.187286] env[65503]: DEBUG nova.network.neutron [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Successfully updated port: 7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1043.411963] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450569, 'name': Rename_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.497890] env[65503]: INFO nova.compute.claims [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1043.563363] env[65503]: INFO nova.scheduler.client.report [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleted allocation for migration 76c6a98b-6534-43d9-bd70-39ee4d80a12e [ 1043.679166] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450568, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.690530] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.690530] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.690634] env[65503]: DEBUG nova.network.neutron [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1043.912215] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450569, 'name': Rename_Task, 'duration_secs': 0.897624} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.912558] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1043.912852] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0be45a58-cf9d-4c34-ba65-91b8d8f7efd5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.916904] env[65503]: DEBUG nova.objects.instance [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'flavor' on Instance uuid 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.922845] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1043.922845] env[65503]: value = "task-4450570" [ 1043.922845] env[65503]: _type = "Task" [ 1043.922845] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.935337] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450570, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.977927] env[65503]: DEBUG nova.compute.manager [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1043.979252] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c361e484-b0ac-4c83-bf7e-c201ffa764e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.005710] env[65503]: INFO nova.compute.resource_tracker [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating resource usage from migration 565057d4-ce23-4f92-ad47-1760ef87ee11 [ 1044.071045] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb145a93-101f-4230-b733-c6d176cab189 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 17.509s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.178387] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450568, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.194674] env[65503]: WARNING openstack [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1044.194674] env[65503]: WARNING openstack [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1044.241042] env[65503]: DEBUG nova.network.neutron [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1044.267020] env[65503]: WARNING openstack [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1044.267452] env[65503]: WARNING openstack [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1044.276867] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a6796f-3414-44a0-a470-a897577bc347 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.286035] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfaf98a-aa7d-4694-8d34-84405eeb90eb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.325071] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378cf465-29e3-4ba0-b6c6-2405fa00a087 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.334879] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3c47af-132a-401c-86f5-f58ab20dfdcf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.355736] env[65503]: DEBUG nova.compute.provider_tree [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1044.376054] env[65503]: WARNING neutronclient.v2_0.client [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1044.376693] env[65503]: WARNING openstack [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1044.377133] env[65503]: WARNING openstack [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1044.422356] env[65503]: DEBUG oslo_concurrency.lockutils [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.422624] env[65503]: DEBUG oslo_concurrency.lockutils [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.422773] env[65503]: DEBUG nova.network.neutron [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1044.422947] env[65503]: DEBUG nova.objects.instance [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'info_cache' on Instance uuid 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.435500] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450570, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.484709] env[65503]: DEBUG nova.network.neutron [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updating instance_info_cache with network_info: [{"id": "7467737c-bad8-423a-85d9-f5870d27aebc", "address": "fa:16:3e:97:92:c0", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7467737c-ba", "ovs_interfaceid": "7467737c-bad8-423a-85d9-f5870d27aebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1044.496136] env[65503]: INFO nova.compute.manager [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] instance snapshotting [ 1044.499810] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ac82f9-811c-47d0-beb4-2cd76583d3ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.523636] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74fee326-fff5-437f-bb20-416cc2537940 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.677649] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450568, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.563875} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.677885] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] bcc91c22-5f92-4233-a293-54f2790a750b/bcc91c22-5f92-4233-a293-54f2790a750b.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1044.678120] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1044.678400] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-365e1138-969c-4bef-b454-e547424aff14 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.688336] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1044.688336] env[65503]: value = "task-4450571" [ 1044.688336] env[65503]: _type = "Task" [ 1044.688336] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.702563] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450571, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.877976] env[65503]: ERROR nova.scheduler.client.report [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [req-9fb6aa57-9308-41db-aac3-67ffa7542d72] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9fb6aa57-9308-41db-aac3-67ffa7542d72"}]} [ 1044.896328] env[65503]: DEBUG nova.scheduler.client.report [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1044.911017] env[65503]: DEBUG nova.scheduler.client.report [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1044.911284] env[65503]: DEBUG nova.compute.provider_tree [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1044.922951] env[65503]: DEBUG nova.scheduler.client.report [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1044.926269] env[65503]: DEBUG nova.objects.base [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Object Instance<2effe3e4-ea22-4d9f-8f5c-38ee661611e3> lazy-loaded attributes: flavor,info_cache {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1044.937631] env[65503]: DEBUG oslo_vmware.api [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450570, 'name': PowerOnVM_Task, 'duration_secs': 0.747989} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.938512] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1044.938716] env[65503]: INFO nova.compute.manager [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Took 9.17 seconds to spawn the instance on the hypervisor. [ 1044.938889] env[65503]: DEBUG nova.compute.manager [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1044.939696] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee75784-c9c0-41b9-89bd-776458ab259a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.945148] env[65503]: DEBUG nova.scheduler.client.report [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1044.988513] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.989075] env[65503]: DEBUG nova.compute.manager [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Instance network_info: |[{"id": "7467737c-bad8-423a-85d9-f5870d27aebc", "address": "fa:16:3e:97:92:c0", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7467737c-ba", 
"ovs_interfaceid": "7467737c-bad8-423a-85d9-f5870d27aebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1044.991131] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:92:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '67921bdb-a7a0-46b5-ba05-ca997496e222', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7467737c-bad8-423a-85d9-f5870d27aebc', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.998078] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1045.001316] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1045.001752] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6b00c87-8305-4516-aaef-e5ebe6e5c727 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.026568] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.026568] env[65503]: value = "task-4450572" [ 1045.026568] env[65503]: _type = "Task" [ 1045.026568] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.039682] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1045.040280] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450572, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.040538] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-150919b0-b02e-4835-82d7-3451a1f81be9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.048346] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1045.048346] env[65503]: value = "task-4450573" [ 1045.048346] env[65503]: _type = "Task" [ 1045.048346] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.063080] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450573, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.120907] env[65503]: DEBUG nova.compute.manager [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Received event network-changed-7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1045.121160] env[65503]: DEBUG nova.compute.manager [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Refreshing instance network info cache due to event network-changed-7467737c-bad8-423a-85d9-f5870d27aebc. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1045.121287] env[65503]: DEBUG oslo_concurrency.lockutils [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] Acquiring lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.121454] env[65503]: DEBUG oslo_concurrency.lockutils [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] Acquired lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.121614] env[65503]: DEBUG nova.network.neutron [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Refreshing network info cache for port 7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1045.201026] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450571, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077917} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.201452] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1045.202332] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2247050-2513-4379-ae77-ec9d6ae23a7c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.205435] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac38cc8-e768-40a6-9389-47a879f7e5bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.225539] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7e1df5-7cd3-4f32-adac-da326c06684d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.244743] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] bcc91c22-5f92-4233-a293-54f2790a750b/bcc91c22-5f92-4233-a293-54f2790a750b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1045.245218] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73fa0ddf-4ad6-4822-a5f4-565562f6e833 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.267722] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1045.267722] env[65503]: value = "task-4450574" [ 1045.267722] env[65503]: _type = "Task" [ 1045.267722] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.296527] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40bf3740-0393-4290-aa01-0202003e7cfd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.308187] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450574, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.311601] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d206e42-a2b2-4c80-bd75-6e1a3c2a8ae3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.328114] env[65503]: DEBUG nova.compute.provider_tree [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1045.432311] env[65503]: WARNING neutronclient.v2_0.client [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1045.432983] env[65503]: WARNING openstack [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1045.433360] env[65503]: WARNING openstack [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1045.461317] env[65503]: INFO nova.compute.manager [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Took 21.37 seconds to build instance. [ 1045.542514] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450572, 'name': CreateVM_Task, 'duration_secs': 0.453395} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.542801] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1045.543313] env[65503]: WARNING neutronclient.v2_0.client [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
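The 409 a few entries back (code placement.concurrent_update) is Placement's optimistic-concurrency check: every inventory write must carry the resource provider generation the client last saw, a stale generation is rejected, and the report client then refreshes its view and retries, which is why the ProviderTree inventory is re-applied above. A minimal sketch of that compare-and-swap loop against the Placement HTTP API follows; the endpoint, token handling and helper name are assumptions for illustration, not Nova's actual report client, and only the request/response shapes mirror the real API.

    # Illustrative sketch of Placement's generation-guarded inventory update.
    # PLACEMENT_URL, the token and set_inventories() are assumed for the example.
    import requests

    PLACEMENT_URL = "http://placement.example:8778"   # assumed endpoint
    HEADERS = {
        "X-Auth-Token": "<token>",                     # assumed auth token
        "OpenStack-API-Version": "placement 1.26",
    }

    def set_inventories(rp_uuid, inventories, max_attempts=3):
        """PUT inventories, re-reading the provider generation on each 409."""
        url = f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_attempts):
            current = requests.get(url, headers=HEADERS).json()
            payload = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, headers=HEADERS, json=payload)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation; loop to refresh and retry, as the log shows.
        raise RuntimeError("inventory update kept hitting generation conflicts")

Later entries in this section show the retry landing: provider 988ff85a-1d12-41bb-a369-e298e8491ca1 is updated at generation 130 and the cached generation moves to 131.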
[ 1045.543775] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.543923] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.544268] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1045.544659] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6352acf-fd44-4496-8e3c-28f04c83dfd4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.550776] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1045.550776] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528c6ef1-6e52-1a83-9c9c-aba93a82c2af" [ 1045.550776] env[65503]: _type = "Task" [ 1045.550776] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.563306] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528c6ef1-6e52-1a83-9c9c-aba93a82c2af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.566622] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450573, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.582340] env[65503]: WARNING openstack [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1045.582772] env[65503]: WARNING openstack [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1045.626624] env[65503]: WARNING neutronclient.v2_0.client [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1045.627403] env[65503]: WARNING openstack [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1045.627837] env[65503]: WARNING openstack [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1045.669263] env[65503]: WARNING neutronclient.v2_0.client [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
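The "Acquiring lock" / "Acquired external semaphore" lines around the image-cache path "[datastore1] devstack-image-cache_base/d68ffece-…" come from oslo.concurrency's lockutils, which Nova uses so that only one worker at a time touches a given cached image. A small, self-contained sketch of that pattern, assuming a hypothetical critical-section body and lock_path; lockutils.lock() is the real API, everything else is illustrative.

    # Sketch of the oslo.concurrency locking seen above; the function body and
    # lock_path are assumptions, not Nova's image-cache code.
    import tempfile
    from oslo_concurrency import lockutils

    IMAGE_ID = "d68ffece-ab91-4610-b535-fa1fb25ade93"

    def fetch_image_if_missing(image_id):
        # One worker per cached image: external=True adds a file-based lock so
        # separate processes on the host serialize too, not just greenthreads.
        with lockutils.lock(image_id, external=True, lock_path=tempfile.gettempdir()):
            # ... check the datastore cache and download the image if absent ...
            print(f"holding the cache lock for image {image_id}")

    fetch_image_if_missing(IMAGE_ID)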
[ 1045.670012] env[65503]: WARNING openstack [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1045.670423] env[65503]: WARNING openstack [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1045.754735] env[65503]: WARNING openstack [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1045.755352] env[65503]: WARNING openstack [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1045.791768] env[65503]: DEBUG nova.network.neutron [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance_info_cache with network_info: [{"id": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "address": "fa:16:3e:c9:1b:64", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2945b8a-83", "ovs_interfaceid": "e2945b8a-8327-4ac8-8d42-fc828663c0e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1045.805431] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 
tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450574, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.856398] env[65503]: WARNING neutronclient.v2_0.client [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1045.857094] env[65503]: WARNING openstack [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1045.857488] env[65503]: WARNING openstack [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1045.875797] env[65503]: DEBUG nova.scheduler.client.report [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 130 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1045.876062] env[65503]: DEBUG nova.compute.provider_tree [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 130 to 131 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1045.876295] env[65503]: DEBUG nova.compute.provider_tree [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1045.964828] env[65503]: DEBUG oslo_concurrency.lockutils [None req-984fe7bb-8562-4cf9-b52d-f268443ce7a0 tempest-SecurityGroupsTestJSON-421579552 
tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.887s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.994058] env[65503]: DEBUG nova.network.neutron [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updated VIF entry in instance network info cache for port 7467737c-bad8-423a-85d9-f5870d27aebc. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1045.994058] env[65503]: DEBUG nova.network.neutron [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updating instance_info_cache with network_info: [{"id": "7467737c-bad8-423a-85d9-f5870d27aebc", "address": "fa:16:3e:97:92:c0", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7467737c-ba", "ovs_interfaceid": "7467737c-bad8-423a-85d9-f5870d27aebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1046.073378] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450573, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.078477] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528c6ef1-6e52-1a83-9c9c-aba93a82c2af, 'name': SearchDatastore_Task, 'duration_secs': 0.012695} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.079054] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.079486] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.079873] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.080179] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.080848] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1046.081048] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6156a407-78ca-4f2e-a8c6-bb8f5959ea00 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.094109] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1046.094109] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1046.095802] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-065979f9-c31c-4225-acef-cdb36d71c515 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.103439] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1046.103439] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ca33a0-2184-0e91-6fcd-c60414b512b7" [ 1046.103439] env[65503]: _type = "Task" [ 1046.103439] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.114537] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ca33a0-2184-0e91-6fcd-c60414b512b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.119047] env[65503]: DEBUG nova.compute.manager [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Received event network-changed-8eff3104-0a50-4cce-b7a9-35ca32ee32fd {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1046.119240] env[65503]: DEBUG nova.compute.manager [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Refreshing instance network info cache due to event network-changed-8eff3104-0a50-4cce-b7a9-35ca32ee32fd. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1046.119441] env[65503]: DEBUG oslo_concurrency.lockutils [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] Acquiring lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.119567] env[65503]: DEBUG oslo_concurrency.lockutils [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] Acquired lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.119730] env[65503]: DEBUG nova.network.neutron [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Refreshing network info cache for port 8eff3104-0a50-4cce-b7a9-35ca32ee32fd {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1046.299676] env[65503]: DEBUG oslo_concurrency.lockutils [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-2effe3e4-ea22-4d9f-8f5c-38ee661611e3" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.308971] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450574, 'name': ReconfigVM_Task, 'duration_secs': 0.546526} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.309323] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Reconfigured VM instance instance-00000061 to attach disk [datastore1] bcc91c22-5f92-4233-a293-54f2790a750b/bcc91c22-5f92-4233-a293-54f2790a750b.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1046.310914] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d726807f-f33b-4117-8071-8d7b3574489d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.325405] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1046.325405] env[65503]: value = "task-4450575" [ 1046.325405] env[65503]: _type = "Task" [ 1046.325405] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.337607] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450575, 'name': Rename_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.382089] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.390s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.382479] env[65503]: INFO nova.compute.manager [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Migrating [ 1046.389508] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.728s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.391173] env[65503]: INFO nova.compute.claims [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1046.497172] env[65503]: DEBUG oslo_concurrency.lockutils [req-f04579fa-afb1-4469-a781-825bbf737adf req-afdf14f2-8da1-47e1-bf4e-733521a1bdba service nova] Releasing lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.568726] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450573, 'name': CreateSnapshot_Task, 'duration_secs': 1.118972} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.568991] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1046.569765] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6771dcd3-1475-401a-bfb2-3c1603a11440 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.614499] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ca33a0-2184-0e91-6fcd-c60414b512b7, 'name': SearchDatastore_Task, 'duration_secs': 0.023968} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.615331] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1a4cbe0-1c6b-4f70-8ad0-169a5b89d24f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.621974] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1046.621974] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cf812c-21ff-b217-03bc-0084a80ca935" [ 1046.621974] env[65503]: _type = "Task" [ 1046.621974] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.624966] env[65503]: WARNING neutronclient.v2_0.client [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1046.625669] env[65503]: WARNING openstack [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1046.626132] env[65503]: WARNING openstack [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1046.643531] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cf812c-21ff-b217-03bc-0084a80ca935, 'name': SearchDatastore_Task, 'duration_secs': 0.010001} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.643933] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.644353] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] b7f55645-f152-4fc9-a962-393f9a9b9c55/b7f55645-f152-4fc9-a962-393f9a9b9c55.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1046.645219] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7c4b775-5a62-4df7-903c-27745828ee1c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.653946] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1046.653946] env[65503]: value = "task-4450576" [ 1046.653946] env[65503]: _type = "Task" [ 1046.653946] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.663271] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450576, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.731885] env[65503]: WARNING openstack [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1046.732394] env[65503]: WARNING openstack [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1046.742462] env[65503]: DEBUG oslo_concurrency.lockutils [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.742758] env[65503]: DEBUG oslo_concurrency.lockutils [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.743115] env[65503]: INFO nova.compute.manager [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Rebooting instance [ 1046.809197] env[65503]: WARNING neutronclient.v2_0.client [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1046.810719] env[65503]: WARNING openstack [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1046.810719] env[65503]: WARNING openstack [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1046.839112] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450575, 'name': Rename_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.900643] env[65503]: DEBUG nova.network.neutron [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Updated VIF entry in instance network info cache for port 8eff3104-0a50-4cce-b7a9-35ca32ee32fd. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1046.901041] env[65503]: DEBUG nova.network.neutron [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Updating instance_info_cache with network_info: [{"id": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "address": "fa:16:3e:9e:b7:dc", "network": {"id": "e65ab27f-25ec-415d-b0f1-4cee1c4a6a2e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1962350376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dbf0e9b08b741e88f971ec5f54dede8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eff3104-0a", "ovs_interfaceid": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1046.905054] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.908598] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.908598] env[65503]: DEBUG nova.network.neutron [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1047.088929] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1047.089325] env[65503]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7790abaf-d14e-42e9-8440-d43d244a624d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.123652] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1047.123652] env[65503]: value = "task-4450577" [ 1047.123652] env[65503]: _type = "Task" [ 1047.123652] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.133563] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450577, 'name': CloneVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.164602] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450576, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476252} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.164896] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] b7f55645-f152-4fc9-a962-393f9a9b9c55/b7f55645-f152-4fc9-a962-393f9a9b9c55.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1047.165109] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1047.165472] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-124c4a50-79c8-4360-87c4-331c9532c66b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.174671] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1047.174671] env[65503]: value = "task-4450578" [ 1047.174671] env[65503]: _type = "Task" [ 1047.174671] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.184547] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450578, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.263434] env[65503]: DEBUG oslo_concurrency.lockutils [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.306368] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1047.306854] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b130be11-2851-467d-9b05-c33ca2ca5621 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.316232] env[65503]: DEBUG oslo_vmware.api [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1047.316232] env[65503]: value = "task-4450579" [ 1047.316232] env[65503]: _type = "Task" [ 1047.316232] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.326768] env[65503]: DEBUG oslo_vmware.api [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450579, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.335874] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450575, 'name': Rename_Task, 'duration_secs': 0.637594} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.336162] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1047.336415] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84f871ad-4b95-4637-9531-b3517504aaab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.344802] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1047.344802] env[65503]: value = "task-4450580" [ 1047.344802] env[65503]: _type = "Task" [ 1047.344802] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.354346] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450580, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.405620] env[65503]: DEBUG oslo_concurrency.lockutils [req-99c37ea1-e35c-4d16-9c4f-89d8e03e6210 req-70e42ef0-fde1-4484-a0c5-436934efc089 service nova] Releasing lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.406324] env[65503]: DEBUG oslo_concurrency.lockutils [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquired lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.406624] env[65503]: DEBUG nova.network.neutron [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1047.414902] env[65503]: WARNING neutronclient.v2_0.client [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1047.415932] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1047.416299] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1047.563738] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1047.564160] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1047.635109] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450577, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.639501] env[65503]: WARNING neutronclient.v2_0.client [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1047.640219] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1047.640632] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1047.688053] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450578, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115303} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.694039] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1047.694039] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b3379d-e59f-456c-af69-39e57843d42f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.718689] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] b7f55645-f152-4fc9-a962-393f9a9b9c55/b7f55645-f152-4fc9-a962-393f9a9b9c55.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.720173] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-655a9213-85be-4c36-807c-4511211230fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.739783] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90b0f68-9df2-4b09-bfcd-140a7f0c930f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.745041] env[65503]: DEBUG nova.network.neutron [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance_info_cache with network_info: [{"id": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "address": "fa:16:3e:b6:80:b2", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": 
"tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309ef3a-1e", "ovs_interfaceid": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1047.754795] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3d7e3a-0717-4a5f-8228-ca51f305a049 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.759254] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1047.759254] env[65503]: value = "task-4450581" [ 1047.759254] env[65503]: _type = "Task" [ 1047.759254] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.799593] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33ce359-164d-4f55-be79-7529e8be2195 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.803644] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450581, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.810428] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87990183-bfd0-46b3-9a85-3fe33cf42337 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.829499] env[65503]: DEBUG nova.compute.provider_tree [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.838767] env[65503]: DEBUG oslo_vmware.api [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450579, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.856439] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450580, 'name': PowerOnVM_Task} progress is 78%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.910491] env[65503]: WARNING neutronclient.v2_0.client [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1047.911372] env[65503]: WARNING openstack [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1047.911713] env[65503]: WARNING openstack [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1048.110098] env[65503]: WARNING openstack [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1048.110602] env[65503]: WARNING openstack [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1048.138120] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450577, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.186880] env[65503]: WARNING neutronclient.v2_0.client [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1048.187678] env[65503]: WARNING openstack [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1048.188112] env[65503]: WARNING openstack [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1048.249562] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.273234] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.289727] env[65503]: DEBUG nova.network.neutron [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Updating instance_info_cache with network_info: [{"id": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "address": "fa:16:3e:9e:b7:dc", "network": {"id": "e65ab27f-25ec-415d-b0f1-4cee1c4a6a2e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1962350376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dbf0e9b08b741e88f971ec5f54dede8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eff3104-0a", "ovs_interfaceid": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1048.332937] env[65503]: DEBUG nova.scheduler.client.report [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based 
on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1048.336805] env[65503]: DEBUG oslo_vmware.api [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450579, 'name': PowerOnVM_Task, 'duration_secs': 0.66441} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.337417] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1048.337620] env[65503]: DEBUG nova.compute.manager [None req-025b652c-8d1c-43ea-acf8-db25b270ae4f tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1048.338563] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7ed5fc-4cf8-4ff6-8d20-c5fabf321a02 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.366372] env[65503]: DEBUG oslo_vmware.api [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450580, 'name': PowerOnVM_Task, 'duration_secs': 0.915608} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.366824] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1048.367144] env[65503]: INFO nova.compute.manager [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Took 10.15 seconds to spawn the instance on the hypervisor. 
[ 1048.367421] env[65503]: DEBUG nova.compute.manager [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1048.368320] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5dd04e4-385e-4422-8b3c-a18ae6457210 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.634844] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450577, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.772306] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450581, 'name': ReconfigVM_Task, 'duration_secs': 0.676147} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.772778] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Reconfigured VM instance instance-00000062 to attach disk [datastore1] b7f55645-f152-4fc9-a962-393f9a9b9c55/b7f55645-f152-4fc9-a962-393f9a9b9c55.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.773438] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-459370c8-6420-4708-8a31-421353350d1f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.782078] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1048.782078] env[65503]: value = "task-4450582" [ 1048.782078] env[65503]: _type = "Task" [ 1048.782078] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.790940] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450582, 'name': Rename_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.793534] env[65503]: DEBUG oslo_concurrency.lockutils [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Releasing lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.844028] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.844722] env[65503]: DEBUG nova.compute.manager [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1048.847922] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.775s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.889906] env[65503]: INFO nova.compute.manager [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Took 24.73 seconds to build instance. [ 1049.136394] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450577, 'name': CloneVM_Task} progress is 95%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.293689] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450582, 'name': Rename_Task, 'duration_secs': 0.158925} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.293978] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.294305] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3638d8d-cbbd-4227-8f03-509f4037e043 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.298219] env[65503]: DEBUG nova.compute.manager [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1049.298989] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f37dfdb-1390-4e0e-a31b-2526546f559c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.303450] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1049.303450] env[65503]: value = "task-4450583" [ 1049.303450] env[65503]: _type = "Task" [ 1049.303450] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.317651] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450583, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.352344] env[65503]: DEBUG nova.compute.utils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1049.364381] env[65503]: DEBUG nova.compute.manager [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1049.364381] env[65503]: DEBUG nova.network.neutron [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1049.364381] env[65503]: WARNING neutronclient.v2_0.client [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1049.364660] env[65503]: WARNING neutronclient.v2_0.client [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1049.365147] env[65503]: WARNING openstack [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1049.365492] env[65503]: WARNING openstack [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1049.392670] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7292ab1c-1435-4501-aa99-6f5f7fcd87f3 tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "bcc91c22-5f92-4233-a293-54f2790a750b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.240s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.428205] env[65503]: DEBUG nova.policy [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b617c5c7508403f9bef0d6b436f541d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be67f50c5bc447309d4c04f3f2805455', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1049.639028] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450577, 'name': 
CloneVM_Task, 'duration_secs': 2.265287} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.639217] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Created linked-clone VM from snapshot [ 1049.640109] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eec6fc6-0772-4d0e-9579-1afd5712d34d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.649155] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Uploading image 5b2e951c-3e9a-4d3c-b99d-569f5b89e872 {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1049.687206] env[65503]: DEBUG oslo_vmware.rw_handles [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1049.687206] env[65503]: value = "vm-870458" [ 1049.687206] env[65503]: _type = "VirtualMachine" [ 1049.687206] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1049.687605] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-93227286-0963-4310-b20c-58acf5630979 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.697377] env[65503]: DEBUG oslo_vmware.rw_handles [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lease: (returnval){ [ 1049.697377] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529f5c63-e6ef-3567-e253-216cd66d1e03" [ 1049.697377] env[65503]: _type = "HttpNfcLease" [ 1049.697377] env[65503]: } obtained for exporting VM: (result){ [ 1049.697377] env[65503]: value = "vm-870458" [ 1049.697377] env[65503]: _type = "VirtualMachine" [ 1049.697377] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1049.697715] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the lease: (returnval){ [ 1049.697715] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529f5c63-e6ef-3567-e253-216cd66d1e03" [ 1049.697715] env[65503]: _type = "HttpNfcLease" [ 1049.697715] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1049.705444] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1049.705444] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529f5c63-e6ef-3567-e253-216cd66d1e03" [ 1049.705444] env[65503]: _type = "HttpNfcLease" [ 1049.705444] env[65503]: } is initializing. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1049.764796] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b4a288-7e94-4647-be1e-59b2e72906bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.787107] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance 'bee97942-afb2-465f-9774-56f5aa8becca' progress to 0 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1049.804461] env[65503]: DEBUG nova.network.neutron [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Successfully created port: aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1049.820211] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450583, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.864783] env[65503]: DEBUG nova.compute.manager [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1049.878295] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Applying migration context for instance bee97942-afb2-465f-9774-56f5aa8becca as it has an incoming, in-progress migration 565057d4-ce23-4f92-ad47-1760ef87ee11. 
Migration status is migrating {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 1049.878295] env[65503]: INFO nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating resource usage from migration 565057d4-ce23-4f92-ad47-1760ef87ee11 [ 1049.898652] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.899039] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.899253] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.899432] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.899590] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.902898] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance e37758cc-7287-4271-ad47-d711201d0add actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.902898] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.902898] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance fcdcabb9-f076-4fa9-ac30-3220eb6064da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.902898] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 96d8f433-9b86-422f-88ef-99836fb21f30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.902898] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.902898] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance b00a98a4-4865-4a02-a353-3d1da9ef0e51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.903184] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance f89ca00e-d54e-4040-bf18-9a5ec96378d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.903184] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance d3ca90c9-3dfa-47a5-b48b-67a45ea26021 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.903184] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 1d1a96cc-63b3-472c-b94a-1ea00763f770 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.903285] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance f5319f15-16eb-468a-a70e-7226963ed219 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.903360] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance f50dce3d-4ef2-46e3-9044-c53b55ddbefb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.903456] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance bcc91c22-5f92-4233-a293-54f2790a750b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.903550] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance b7f55645-f152-4fc9-a962-393f9a9b9c55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.903648] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Migration 565057d4-ce23-4f92-ad47-1760ef87ee11 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1745}} [ 1049.903741] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance bee97942-afb2-465f-9774-56f5aa8becca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.904670] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance f71dca10-3b68-4f1e-868e-a8c8271f7c88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1049.904670] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1049.904670] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3712MB phys_disk=100GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '15', 'num_vm_active': '12', 'num_task_None': '11', 'num_os_type_None': '15', 'num_proj_3658921b747e4d78a2046b838cb36d26': '1', 'io_workload': '3', 'num_proj_d3ade9ce0dc44449bb7a3bf0c624e366': '3', 'num_vm_rescued': '1', 'num_proj_34e8cd66745a40d2acebbce98050ee5d': '1', 'num_proj_0dbf0e9b08b741e88f971ec5f54dede8': '2', 'num_proj_592efb180976432cbcecb9ad421e1bd1': '1', 'num_proj_19e0e62fe31a46fc802dbfc625ac7645': '1', 'num_task_resize_prep': '1', 'num_proj_463e93d05e1e4b27a3dc866a5b1991d0': '1', 'num_proj_521d40776571452e85178972f97c8622': '1', 'num_proj_be67f50c5bc447309d4c04f3f2805455': '2', 'num_task_image_uploading': '1', 'num_proj_8c5b0c3771b5434992cd58e1af539bde': '1', 'num_task_rebooting_hard': '1', 'num_proj_bf54348a3d0948cfa816cc3746e86806': '1', 'num_vm_building': '2', 'num_task_spawning': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1049.907599] env[65503]: INFO nova.compute.manager [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Terminating instance [ 1050.161991] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5aa987-8cbe-4383-b4a9-401b9a524b9f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.172169] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87aaf1f8-c597-48cf-85a7-4d1bcbd1b40b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.222654] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cff12c-d598-4554-bd7a-61a9458a0a2d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.236311] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1180c7d-9358-4c82-bbac-dfd80b49f190 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.241953] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1050.241953] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529f5c63-e6ef-3567-e253-216cd66d1e03" [ 1050.241953] env[65503]: _type = "HttpNfcLease" [ 1050.241953] env[65503]: } is ready. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1050.242832] env[65503]: DEBUG oslo_vmware.rw_handles [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1050.242832] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529f5c63-e6ef-3567-e253-216cd66d1e03" [ 1050.242832] env[65503]: _type = "HttpNfcLease" [ 1050.242832] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1050.244370] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74cc3bd8-2fe5-48b3-b6c1-37390b5a6ac9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.260413] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.270558] env[65503]: DEBUG oslo_vmware.rw_handles [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd526-4079-2163-2727-b1ddba4fc29a/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1050.270558] env[65503]: DEBUG oslo_vmware.rw_handles [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd526-4079-2163-2727-b1ddba4fc29a/disk-0.vmdk for reading. 
{{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1050.347225] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.350379] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fcf969f-4043-410b-b4f8-2b1746dcff0a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.358103] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067d90f1-42cc-4f07-acdf-37a3a09eeb31 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.377624] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Doing hard reboot of VM {{(pid=65503) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1050.378240] env[65503]: DEBUG oslo_vmware.api [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450583, 'name': PowerOnVM_Task, 'duration_secs': 0.696259} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.379671] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-82d4855d-8c77-4777-b823-a86f2458e564 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.381374] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1050.381584] env[65503]: INFO nova.compute.manager [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Took 7.83 seconds to spawn the instance on the hypervisor. [ 1050.381751] env[65503]: DEBUG nova.compute.manager [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1050.382088] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1050.382088] env[65503]: value = "task-4450585" [ 1050.382088] env[65503]: _type = "Task" [ 1050.382088] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.383102] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004b3cd1-87b2-4320-bb94-1463dc369587 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.393624] env[65503]: DEBUG oslo_vmware.api [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1050.393624] env[65503]: value = "task-4450586" [ 1050.393624] env[65503]: _type = "Task" [ 1050.393624] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.408572] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450585, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.409923] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-38343e41-ebc2-4981-8af2-b64d53b3582a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.415938] env[65503]: DEBUG nova.compute.manager [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1050.416212] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1050.416562] env[65503]: DEBUG oslo_vmware.api [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450586, 'name': ResetVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.418533] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119b5247-7754-40d4-aba1-98f9289e7a8e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.427616] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.429912] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac356476-4b74-4584-a036-4aa834918c6d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.445931] env[65503]: DEBUG oslo_vmware.api [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1050.445931] env[65503]: value = "task-4450587" [ 1050.445931] env[65503]: _type = "Task" [ 1050.445931] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.469971] env[65503]: DEBUG oslo_vmware.api [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450587, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.601908] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "bcc91c22-5f92-4233-a293-54f2790a750b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.602369] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "bcc91c22-5f92-4233-a293-54f2790a750b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.602757] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "bcc91c22-5f92-4233-a293-54f2790a750b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.603111] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "bcc91c22-5f92-4233-a293-54f2790a750b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.603436] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "bcc91c22-5f92-4233-a293-54f2790a750b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.605834] env[65503]: INFO nova.compute.manager [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Terminating instance [ 1050.763974] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1050.885079] env[65503]: DEBUG nova.compute.manager [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1050.902998] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450585, 'name': PowerOffVM_Task, 'duration_secs': 0.285389} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.906259] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.906594] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance 'bee97942-afb2-465f-9774-56f5aa8becca' progress to 17 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1050.918717] env[65503]: DEBUG oslo_vmware.api [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450586, 'name': ResetVM_Task, 'duration_secs': 0.109367} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.919599] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Did hard reboot of VM {{(pid=65503) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1050.919820] env[65503]: DEBUG nova.compute.manager [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1050.922711] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f634b942-bfef-4c24-a6e5-8214ee8ebc70 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.928592] env[65503]: DEBUG nova.virt.hardware [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1050.928931] env[65503]: DEBUG nova.virt.hardware [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1050.929200] env[65503]: DEBUG nova.virt.hardware [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1050.929476] env[65503]: DEBUG nova.virt.hardware [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1050.929687] env[65503]: DEBUG nova.virt.hardware [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1050.929878] env[65503]: DEBUG nova.virt.hardware [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 
tempest-AttachInterfacesTestJSON-1313165704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1050.930219] env[65503]: DEBUG nova.virt.hardware [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1050.930475] env[65503]: DEBUG nova.virt.hardware [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1050.930706] env[65503]: DEBUG nova.virt.hardware [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1050.931488] env[65503]: DEBUG nova.virt.hardware [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1050.931488] env[65503]: DEBUG nova.virt.hardware [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1050.932363] env[65503]: INFO nova.compute.manager [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Took 25.94 seconds to build instance. [ 1050.937110] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb9796a-474a-4392-b499-59e2180218c2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.944488] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7246cd34-5deb-44f5-8369-21472f91b2a0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.973576] env[65503]: DEBUG oslo_vmware.api [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450587, 'name': PowerOffVM_Task, 'duration_secs': 0.280187} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.973906] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.974336] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1050.974606] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00b43fa1-6d01-487e-9eb3-0fa27eb7b623 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.061461] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1051.061761] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1051.061890] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleting the datastore file [datastore2] 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.062297] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b8edc45-24ad-4baf-90cc-51d4c0030d77 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.071906] env[65503]: DEBUG oslo_vmware.api [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1051.071906] env[65503]: value = "task-4450589" [ 1051.071906] env[65503]: _type = "Task" [ 1051.071906] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.083711] env[65503]: DEBUG oslo_vmware.api [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450589, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.110842] env[65503]: DEBUG nova.compute.manager [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1051.111273] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1051.112450] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a560c701-556a-4dee-8de1-c64d36fe0cfb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.125288] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.125288] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-987da344-d376-4688-91f1-2a34d50362ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.135251] env[65503]: DEBUG oslo_vmware.api [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1051.135251] env[65503]: value = "task-4450590" [ 1051.135251] env[65503]: _type = "Task" [ 1051.135251] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.148113] env[65503]: DEBUG oslo_vmware.api [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450590, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.270034] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1051.270426] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.422s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.347260] env[65503]: DEBUG nova.compute.manager [req-e58fad55-12bb-45aa-ad36-51a47ded9306 req-fe526688-dd58-4c5b-bc66-e13f563a256a service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Received event network-vif-plugged-aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1051.347602] env[65503]: DEBUG oslo_concurrency.lockutils [req-e58fad55-12bb-45aa-ad36-51a47ded9306 req-fe526688-dd58-4c5b-bc66-e13f563a256a service nova] Acquiring lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.348074] env[65503]: DEBUG oslo_concurrency.lockutils [req-e58fad55-12bb-45aa-ad36-51a47ded9306 req-fe526688-dd58-4c5b-bc66-e13f563a256a service nova] Lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.348485] env[65503]: DEBUG oslo_concurrency.lockutils [req-e58fad55-12bb-45aa-ad36-51a47ded9306 req-fe526688-dd58-4c5b-bc66-e13f563a256a service nova] Lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.349464] env[65503]: DEBUG nova.compute.manager [req-e58fad55-12bb-45aa-ad36-51a47ded9306 req-fe526688-dd58-4c5b-bc66-e13f563a256a service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] No waiting events found dispatching network-vif-plugged-aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1051.349464] env[65503]: WARNING nova.compute.manager [req-e58fad55-12bb-45aa-ad36-51a47ded9306 req-fe526688-dd58-4c5b-bc66-e13f563a256a service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Received unexpected event network-vif-plugged-aba660fb-0b53-43b7-a795-9ebaa9dd3097 for instance with vm_state building and task_state spawning. 
[ 1051.422084] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1051.422560] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1051.422560] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1051.422814] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1051.423089] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1051.423358] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1051.423644] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1051.423883] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1051.424155] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1051.424989] 
env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1051.425300] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1051.430500] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81c8f24e-a638-4455-a3d7-d4e41f95258c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.443210] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7cc61a9e-6309-4935-a7fc-a21cce7d2088 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.470s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.450877] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1051.450877] env[65503]: value = "task-4450591" [ 1051.450877] env[65503]: _type = "Task" [ 1051.450877] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.459501] env[65503]: DEBUG oslo_concurrency.lockutils [None req-901378ce-c2f2-420b-8979-4d2d6390fa0e tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.717s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.466399] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450591, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.482857] env[65503]: DEBUG nova.network.neutron [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Successfully updated port: aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1051.584066] env[65503]: DEBUG oslo_vmware.api [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281999} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.584844] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1051.584844] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1051.584952] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1051.585108] env[65503]: INFO nova.compute.manager [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1051.585498] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1051.585720] env[65503]: DEBUG nova.compute.manager [-] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1051.585993] env[65503]: DEBUG nova.network.neutron [-] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1051.586294] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1051.586906] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1051.587232] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1051.647912] env[65503]: DEBUG oslo_vmware.api [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450590, 'name': PowerOffVM_Task, 'duration_secs': 0.235192} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.648169] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.648329] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1051.648658] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-392db913-5cf6-40ce-b867-428e42cbccf5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.667089] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1051.710330] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1051.710572] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1051.710800] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleting the datastore file [datastore1] bcc91c22-5f92-4233-a293-54f2790a750b {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.711119] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd28d231-2be0-4e21-890e-c0fd1ef47063 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.719493] env[65503]: DEBUG oslo_vmware.api [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for the task: (returnval){ [ 1051.719493] env[65503]: value = "task-4450593" [ 1051.719493] env[65503]: _type = "Task" [ 1051.719493] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.729224] env[65503]: DEBUG oslo_vmware.api [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450593, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.962725] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450591, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.985978] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.986461] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.986812] env[65503]: DEBUG nova.network.neutron [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1052.232336] env[65503]: DEBUG oslo_vmware.api [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Task: {'id': task-4450593, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31474} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.232603] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.232782] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1052.232949] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1052.233133] env[65503]: INFO nova.compute.manager [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1052.233380] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1052.233572] env[65503]: DEBUG nova.compute.manager [-] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1052.233672] env[65503]: DEBUG nova.network.neutron [-] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1052.233913] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1052.234551] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1052.235234] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1052.329682] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1052.421039] env[65503]: DEBUG nova.compute.manager [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Received event network-changed-7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1052.421039] env[65503]: DEBUG nova.compute.manager [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Refreshing instance network info cache due to event network-changed-7467737c-bad8-423a-85d9-f5870d27aebc. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1052.421190] env[65503]: DEBUG oslo_concurrency.lockutils [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] Acquiring lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.421447] env[65503]: DEBUG oslo_concurrency.lockutils [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] Acquired lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.421447] env[65503]: DEBUG nova.network.neutron [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Refreshing network info cache for port 7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1052.467454] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450591, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.492710] env[65503]: WARNING openstack [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1052.493434] env[65503]: WARNING openstack [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1053.448760] env[65503]: DEBUG nova.network.neutron [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1053.450987] env[65503]: DEBUG nova.network.neutron [-] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1053.452410] env[65503]: WARNING neutronclient.v2_0.client [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1053.453368] env[65503]: WARNING openstack [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1053.453800] env[65503]: WARNING openstack [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1053.466015] env[65503]: DEBUG nova.compute.manager [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Received event network-changed-aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1053.466546] env[65503]: DEBUG nova.compute.manager [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Refreshing instance network info cache due to event network-changed-aba660fb-0b53-43b7-a795-9ebaa9dd3097. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1053.466776] env[65503]: DEBUG oslo_concurrency.lockutils [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] Acquiring lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.468293] env[65503]: DEBUG nova.network.neutron [-] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1053.484039] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450591, 'name': ReconfigVM_Task, 'duration_secs': 1.295886} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.484380] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance 'bee97942-afb2-465f-9774-56f5aa8becca' progress to 33 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1053.491382] env[65503]: WARNING openstack [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1053.491788] env[65503]: WARNING openstack [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1053.590034] env[65503]: WARNING neutronclient.v2_0.client [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1053.590848] env[65503]: WARNING openstack [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1053.591250] env[65503]: WARNING openstack [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1053.604055] env[65503]: WARNING openstack [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1053.604503] env[65503]: WARNING openstack [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1053.680513] env[65503]: WARNING neutronclient.v2_0.client [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1053.681320] env[65503]: WARNING openstack [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1053.681769] env[65503]: WARNING openstack [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1053.693233] env[65503]: DEBUG nova.network.neutron [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updating instance_info_cache with network_info: [{"id": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "address": "fa:16:3e:b5:17:21", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaba660fb-0b", "ovs_interfaceid": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1053.698056] env[65503]: DEBUG oslo_concurrency.lockutils [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.698292] env[65503]: DEBUG oslo_concurrency.lockutils [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.774396] env[65503]: DEBUG nova.network.neutron [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updated VIF entry in 
instance network info cache for port 7467737c-bad8-423a-85d9-f5870d27aebc. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1053.774899] env[65503]: DEBUG nova.network.neutron [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updating instance_info_cache with network_info: [{"id": "7467737c-bad8-423a-85d9-f5870d27aebc", "address": "fa:16:3e:97:92:c0", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7467737c-ba", "ovs_interfaceid": "7467737c-bad8-423a-85d9-f5870d27aebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1053.966110] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.966425] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.966765] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.966977] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.967163] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.969053] env[65503]: INFO nova.compute.manager [-] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Took 2.38 seconds to deallocate network for instance. [ 1053.969542] env[65503]: INFO nova.compute.manager [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Terminating instance [ 1053.975709] env[65503]: INFO nova.compute.manager [-] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Took 1.74 seconds to deallocate network for instance. [ 1053.990699] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1053.990971] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1053.991108] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1053.991339] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1053.991551] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1053.991753] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1053.992015] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1053.992181] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1053.992349] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1053.992532] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1053.992723] env[65503]: DEBUG nova.virt.hardware [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1053.998857] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Reconfiguring VM instance instance-0000005c to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1053.999680] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5d6bd70-7b93-4ac9-aab5-ed3be6a13bf0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.022278] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1054.022278] env[65503]: value = "task-4450594" [ 1054.022278] env[65503]: _type = "Task" [ 1054.022278] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.033012] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450594, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.196528] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.197127] env[65503]: DEBUG nova.compute.manager [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Instance network_info: |[{"id": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "address": "fa:16:3e:b5:17:21", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaba660fb-0b", "ovs_interfaceid": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1054.197502] env[65503]: DEBUG oslo_concurrency.lockutils [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] Acquired lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.197778] env[65503]: DEBUG nova.network.neutron [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Refreshing network info cache for port aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1054.199015] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:17:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aba660fb-0b53-43b7-a795-9ebaa9dd3097', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1054.207386] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 
tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1054.209194] env[65503]: WARNING neutronclient.v2_0.client [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1054.209908] env[65503]: WARNING openstack [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1054.210278] env[65503]: WARNING openstack [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1054.219121] env[65503]: DEBUG nova.compute.utils [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1054.220628] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1054.222430] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5518304-9232-4cfc-825f-b7f5a85cc1a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.246944] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1054.246944] env[65503]: value = "task-4450595" [ 1054.246944] env[65503]: _type = "Task" [ 1054.246944] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.256764] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450595, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.278451] env[65503]: DEBUG oslo_concurrency.lockutils [req-b658ef90-fdb4-4b52-a2af-222f9f112441 req-f7caffdb-f050-498f-8b51-97cbdf6fbcbc service nova] Releasing lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.340644] env[65503]: WARNING openstack [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1054.341188] env[65503]: WARNING openstack [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1054.437246] env[65503]: WARNING neutronclient.v2_0.client [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1054.438867] env[65503]: WARNING openstack [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1054.438867] env[65503]: WARNING openstack [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1054.478508] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.479149] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.479590] env[65503]: DEBUG nova.objects.instance [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'resources' on Instance uuid 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 {{(pid=65503) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.482362] env[65503]: DEBUG nova.compute.manager [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1054.482753] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1054.484439] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.485560] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e51e870-73e4-4bda-9515-416f6dc9737c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.497262] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1054.497700] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3733d71-36d3-4da2-b7c3-c799827763b6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.506704] env[65503]: DEBUG oslo_vmware.api [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1054.506704] env[65503]: value = "task-4450596" [ 1054.506704] env[65503]: _type = "Task" [ 1054.506704] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.521291] env[65503]: DEBUG oslo_vmware.api [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450596, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.532033] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450594, 'name': ReconfigVM_Task, 'duration_secs': 0.290462} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.532439] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Reconfigured VM instance instance-0000005c to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1054.533360] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835b2945-c5c9-4c9d-b56e-ddfbeb126c99 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.558134] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] bee97942-afb2-465f-9774-56f5aa8becca/bee97942-afb2-465f-9774-56f5aa8becca.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1054.558586] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67a6d567-98d4-4053-82e4-8d9e24170216 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.574298] env[65503]: DEBUG nova.network.neutron [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updated VIF entry in instance network info cache for port aba660fb-0b53-43b7-a795-9ebaa9dd3097. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1054.574729] env[65503]: DEBUG nova.network.neutron [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updating instance_info_cache with network_info: [{"id": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "address": "fa:16:3e:b5:17:21", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaba660fb-0b", "ovs_interfaceid": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1054.584383] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1054.584383] env[65503]: value = "task-4450597" [ 1054.584383] env[65503]: _type = "Task" [ 1054.584383] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.596767] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450597, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.631349] env[65503]: DEBUG nova.compute.manager [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Received event network-vif-deleted-17d55d47-c1ec-47a1-b233-b7ea23837bb5 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1054.632561] env[65503]: DEBUG nova.compute.manager [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Received event network-changed-8eff3104-0a50-4cce-b7a9-35ca32ee32fd {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1054.632561] env[65503]: DEBUG nova.compute.manager [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Refreshing instance network info cache due to event network-changed-8eff3104-0a50-4cce-b7a9-35ca32ee32fd. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1054.632561] env[65503]: DEBUG oslo_concurrency.lockutils [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] Acquiring lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.633222] env[65503]: DEBUG oslo_concurrency.lockutils [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] Acquired lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.633222] env[65503]: DEBUG nova.network.neutron [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Refreshing network info cache for port 8eff3104-0a50-4cce-b7a9-35ca32ee32fd {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1054.723857] env[65503]: DEBUG oslo_concurrency.lockutils [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.025s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.759381] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450595, 'name': CreateVM_Task, 'duration_secs': 0.40031} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.759688] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1054.760354] env[65503]: WARNING neutronclient.v2_0.client [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
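The interleaved lock lines in this stretch of the log (the per-instance "..do_reserve" and "..do_terminate_instance" locks, "compute_resources", the "refresh_cache-<uuid>" locks, and the 'Lock "fcdcabb9-..." "released" ... held 1.025s' entry just above) are all emitted by oslo.concurrency's lockutils helpers. A minimal sketch of the two forms follows, assuming only oslo.concurrency; the lock names are copied from the log and the function body is a placeholder, not Nova's code.

from oslo_concurrency import lockutils

# Decorator form: wraps a critical section under a named lock and produces the
# 'acquired by "..." :: waited' / 'released ... :: held N.NNNs' DEBUG lines.
@lockutils.synchronized('fcdcabb9-f076-4fa9-ac30-3220eb6064da')
def do_reserve():
    return '/dev/sdb'  # placeholder for reserve_block_device_name's real work

# Context-manager form: produces the 'Acquiring lock' / 'Acquired lock' /
# 'Releasing lock' lines seen around the network info cache refreshes.
with lockutils.lock('refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88'):
    device = do_reserve()

print(device)

The "waited"/"held" durations are the useful part when reading a log like this: the do_reserve lock above was held for 1.025s, which is the first place to look if block-device operations start queueing behind one another.
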
[ 1054.760742] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.760893] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.761217] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1054.761488] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9f53cbf-3059-48ac-9e79-bf14ba6febf9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.767910] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1054.767910] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529d9423-042e-5413-f3ec-ae2813b3faf0" [ 1054.767910] env[65503]: _type = "Task" [ 1054.767910] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.777241] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529d9423-042e-5413-f3ec-ae2813b3faf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.023708] env[65503]: DEBUG oslo_vmware.api [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450596, 'name': PowerOffVM_Task, 'duration_secs': 0.252796} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.024110] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1055.024300] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1055.024642] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48cc86a5-c035-4b7f-8d03-80402cf8a4c4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.079059] env[65503]: DEBUG oslo_concurrency.lockutils [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] Releasing lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.079902] env[65503]: DEBUG nova.compute.manager [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Received event network-vif-deleted-e2945b8a-8327-4ac8-8d42-fc828663c0e0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1055.079902] env[65503]: INFO nova.compute.manager [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Neutron deleted interface e2945b8a-8327-4ac8-8d42-fc828663c0e0; detaching it from the instance and deleting it from the info cache [ 1055.080059] env[65503]: DEBUG nova.network.neutron [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1055.099582] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450597, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.100986] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1055.101201] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1055.101375] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Deleting the datastore file [datastore1] f50dce3d-4ef2-46e3-9044-c53b55ddbefb {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1055.101725] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14216159-6569-4693-a206-73231a73989c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.111069] env[65503]: DEBUG oslo_vmware.api [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1055.111069] env[65503]: value = "task-4450599" [ 1055.111069] env[65503]: _type = "Task" [ 1055.111069] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.123294] env[65503]: DEBUG oslo_vmware.api [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450599, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.136388] env[65503]: WARNING neutronclient.v2_0.client [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
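The "Invoking <object>.<method> with opID=oslo.vmware-..." lines, the "Waiting for the task: (returnval){ value = "task-..." }" blocks, and the "progress is N%" polling (ReconfigVM_Task, CreateVM_Task, PowerOffVM_Task, and the DeleteDatastoreFile_Task just above) are all the standard oslo.vmware call-then-poll pattern. A minimal sketch of that pattern follows; the endpoint and credentials are placeholders, create_session=False keeps the constructor from contacting a vCenter, and the polling calls are shown as comments because they need a live session. This illustrates the library API, not the driver's actual code path.

from oslo_vmware import api

# Placeholder connection details; a real session needs a reachable vCenter.
session = api.VMwareAPISession(
    'vc.example.test', 'username', 'password',
    api_retry_count=10, task_poll_interval=0.5,
    create_session=False)

# With a live session, a vSphere method that returns a task reference is
# invoked and then polled until it reaches the "success" state:
#
#     task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
#     task_info = session.wait_for_task(task_ref)  # raises on task error
#
# wait_for_task() is what emits the "Task: {'id': ..., 'name': ...} progress
# is N%" DEBUG lines seen throughout this section.
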
[ 1055.137117] env[65503]: WARNING openstack [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1055.137439] env[65503]: WARNING openstack [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1055.242255] env[65503]: WARNING openstack [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1055.242694] env[65503]: WARNING openstack [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1055.257422] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4cb233-a866-4ea0-b8c6-6da90fdb5f80 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.265895] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ea2b2d-d4bf-4ea5-bfa1-761d5efc68e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.309103] env[65503]: DEBUG oslo_concurrency.lockutils [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.309339] env[65503]: DEBUG oslo_concurrency.lockutils [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.309595] env[65503]: INFO nova.compute.manager [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Attaching volume 4ade83db-2b28-4173-9346-757fd8acf7f7 to /dev/sdb [ 1055.311857] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-16fa279d-0658-4543-9508-5e78fa321bde {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.319627] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529d9423-042e-5413-f3ec-ae2813b3faf0, 'name': SearchDatastore_Task, 'duration_secs': 0.012257} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.322753] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.323032] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1055.323261] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.323402] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.323575] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1055.324399] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41bbd36e-7f01-47d0-a97a-622b0ff17de9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.330825] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a369f35-17ac-4500-8aa6-d57ccb577bcd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.338789] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1055.339439] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1055.352448] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f018700-2c21-4473-a1e3-456853f6124e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.355144] env[65503]: DEBUG nova.compute.provider_tree [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1055.362583] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1055.362583] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5286c644-1353-1cf5-89b5-b854d55dced9" [ 1055.362583] env[65503]: _type = "Task" [ 1055.362583] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.370557] env[65503]: WARNING neutronclient.v2_0.client [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1055.372587] env[65503]: WARNING openstack [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1055.372587] env[65503]: WARNING openstack [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1055.384544] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5286c644-1353-1cf5-89b5-b854d55dced9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.385421] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ab16b2-22e4-4ef3-863e-3bfa8b01fbfe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.397052] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d40510-d95a-4a4c-b5aa-5654b03384dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.413170] env[65503]: DEBUG nova.virt.block_device [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Updating existing volume attachment record: 2cbe7cac-7222-45bc-bd45-4b15ef6be9a5 {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1055.484557] env[65503]: DEBUG nova.network.neutron [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Updated VIF entry in instance network info cache for port 8eff3104-0a50-4cce-b7a9-35ca32ee32fd. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1055.484990] env[65503]: DEBUG nova.network.neutron [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Updating instance_info_cache with network_info: [{"id": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "address": "fa:16:3e:9e:b7:dc", "network": {"id": "e65ab27f-25ec-415d-b0f1-4cee1c4a6a2e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1962350376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dbf0e9b08b741e88f971ec5f54dede8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eff3104-0a", "ovs_interfaceid": "8eff3104-0a50-4cce-b7a9-35ca32ee32fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1055.582924] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2616c715-bcab-4474-95a3-8eae80ac9295 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.595589] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450597, 'name': ReconfigVM_Task, 'duration_secs': 0.526637} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.596986] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Reconfigured VM instance instance-0000005c to attach disk [datastore2] bee97942-afb2-465f-9774-56f5aa8becca/bee97942-afb2-465f-9774-56f5aa8becca.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.597281] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance 'bee97942-afb2-465f-9774-56f5aa8becca' progress to 50 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1055.604670] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ed5dab-0304-47fd-aba3-b7096f2258ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.631773] env[65503]: DEBUG oslo_vmware.api [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450599, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271325} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.632778] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1055.632778] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1055.632778] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1055.632778] env[65503]: INFO nova.compute.manager [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1055.633092] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1055.645260] env[65503]: DEBUG nova.compute.manager [-] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1055.645433] env[65503]: DEBUG nova.network.neutron [-] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1055.645696] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1055.646283] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1055.646577] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1055.653303] env[65503]: DEBUG nova.compute.manager [req-08f33af5-a0d6-4e51-886a-6d5879509580 req-29579869-26f0-4968-bece-a543d72b97a9 service nova] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Detach interface failed, port_id=e2945b8a-8327-4ac8-8d42-fc828663c0e0, reason: Instance 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1055.700173] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1055.879205] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5286c644-1353-1cf5-89b5-b854d55dced9, 'name': SearchDatastore_Task, 'duration_secs': 0.01292} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.880270] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fc01981-7929-42bf-8f79-e0575dc181b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.889262] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1055.889262] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525d4881-7c3f-3fae-937a-c46254462bd8" [ 1055.889262] env[65503]: _type = "Task" [ 1055.889262] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.900458] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525d4881-7c3f-3fae-937a-c46254462bd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.912261] env[65503]: DEBUG nova.scheduler.client.report [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 131 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1055.912523] env[65503]: DEBUG nova.compute.provider_tree [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 131 to 132 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1055.912782] env[65503]: DEBUG nova.compute.provider_tree [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1055.988276] env[65503]: DEBUG oslo_concurrency.lockutils [req-e022c040-069f-4fb9-b9a1-612b58d07062 req-ff60cfee-0a6a-4fcb-98c3-86beb6d6179b service nova] Releasing lock "refresh_cache-f50dce3d-4ef2-46e3-9044-c53b55ddbefb" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.120168] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacc2357-2bd9-4ee2-948d-416391a79137 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.142761] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d225341-46a8-4fef-9d0d-f5f9d373cbe1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.161820] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 
tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance 'bee97942-afb2-465f-9774-56f5aa8becca' progress to 67 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1056.400437] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525d4881-7c3f-3fae-937a-c46254462bd8, 'name': SearchDatastore_Task, 'duration_secs': 0.014391} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.400724] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.401016] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f71dca10-3b68-4f1e-868e-a8c8271f7c88/f71dca10-3b68-4f1e-868e-a8c8271f7c88.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1056.401294] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c3beb84-af35-4b7c-969c-4f5d7b2a75d9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.410797] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1056.410797] env[65503]: value = "task-4450601" [ 1056.410797] env[65503]: _type = "Task" [ 1056.410797] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.420866] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.942s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.423273] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450601, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.423789] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.939s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.424036] env[65503]: DEBUG nova.objects.instance [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lazy-loading 'resources' on Instance uuid bcc91c22-5f92-4233-a293-54f2790a750b {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1056.430122] env[65503]: DEBUG nova.network.neutron [-] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1056.450151] env[65503]: INFO nova.scheduler.client.report [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleted allocations for instance 2effe3e4-ea22-4d9f-8f5c-38ee661611e3 [ 1056.660340] env[65503]: DEBUG nova.compute.manager [req-822886f1-dabd-4e1d-99bb-4354f9f1dd55 req-edbb7fc7-9aab-4e28-8959-cf40d8d440c1 service nova] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Received event network-vif-deleted-8eff3104-0a50-4cce-b7a9-35ca32ee32fd {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1056.668344] env[65503]: WARNING neutronclient.v2_0.client [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
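The "Acquiring lock ... by ...", "acquired ... :: waited Ns", and ""released" ... :: held Ns" entries in this stretch (for example around the "compute_resources" lock held by the resource tracker) are emitted by oslo.concurrency's lockutils whenever Nova serializes a critical section. A minimal sketch of that usage follows; the decorator prefix and function name are illustrative assumptions, not Nova's actual code.

```python
# Sketch of the oslo.concurrency pattern behind the
# 'Acquiring lock "compute_resources" ... acquired ... "released"' entries.
# The 'nova-' prefix and update_usage() name are illustrative assumptions.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources', 'nova-')
def update_usage(instance):
    # Runs with the in-process semaphore held; lockutils logs how long the
    # caller waited for the lock and how long the critical section held it.
    pass

# The same lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    pass
```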
[ 1056.705897] env[65503]: DEBUG nova.network.neutron [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Port 7309ef3a-1ef4-4d05-a35b-8aecb1167266 binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 1056.710079] env[65503]: DEBUG oslo_concurrency.lockutils [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.710321] env[65503]: DEBUG oslo_concurrency.lockutils [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.710490] env[65503]: INFO nova.compute.manager [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Shelving [ 1056.925581] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450601, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.933143] env[65503]: INFO nova.compute.manager [-] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Took 1.29 seconds to deallocate network for instance. 
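The recurring "Waiting for the task: (returnval){ ... } to complete", "progress is N%", and "completed successfully" triplets (here for CopyVirtualDisk_Task and, just below, PowerOffVM_Task) come from oslo.vmware's task poller. A rough sketch of how a driver issues a vCenter task through a VMwareAPISession and blocks on it; the connection values are placeholders and the power-off helper is only an example, not this deployment's configuration.

```python
# Hedged sketch of the oslo.vmware task-polling pattern visible in the
# "Waiting for the task ... / progress is N% / completed successfully" lines.
from oslo_vmware import api

def power_off(session, vm_ref):
    """Start PowerOffVM_Task on a VM moref and poll it to completion."""
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() re-reads the task state on an interval, logging the
    # progress lines seen above, and raises if the task ends in error.
    session.wait_for_task(task)

# Placeholder connection values; real credentials come from nova.conf.
session = api.VMwareAPISession(
    host='vcenter.example.test', port=443,
    server_username='admin', server_password='secret',
    api_retry_count=10, task_poll_interval=0.5)
```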
[ 1056.962143] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a6ce3bde-8d27-47a5-a1d7-4f5556e6bc88 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2effe3e4-ea22-4d9f-8f5c-38ee661611e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.063s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.187945] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2921dc-d58c-443c-b934-1a4bdfe384f1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.198540] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbf9695-c418-4afa-8f19-af9b6349a08a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.240758] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f5d17b-2c9d-4b33-9c3a-755f0ba7347d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.252192] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b062a0b-eaad-4f6f-9c89-98fbf41df2a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.269341] env[65503]: DEBUG nova.compute.provider_tree [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1057.423805] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713014} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.424097] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] f71dca10-3b68-4f1e-868e-a8c8271f7c88/f71dca10-3b68-4f1e-868e-a8c8271f7c88.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1057.424311] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1057.424635] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11c1ad71-57cb-45ab-93b4-3476c3575958 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.433977] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1057.433977] env[65503]: value = "task-4450602" [ 1057.433977] env[65503]: _type = "Task" [ 1057.433977] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.444751] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.445110] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450602, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.732695] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.733096] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.750386] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1057.750886] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "bee97942-afb2-465f-9774-56f5aa8becca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.751118] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "bee97942-afb2-465f-9774-56f5aa8becca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.751290] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "bee97942-afb2-465f-9774-56f5aa8becca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.754018] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92e09cb7-05ff-48c6-b86b-f7327e4873e3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.769552] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1057.769552] env[65503]: value = "task-4450604" [ 1057.769552] env[65503]: _type = "Task" [ 1057.769552] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.782462] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450604, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.799400] env[65503]: ERROR nova.scheduler.client.report [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] [req-bea269b0-b47c-49fb-817a-c8cab9d3f6ab] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bea269b0-b47c-49fb-817a-c8cab9d3f6ab"}]} [ 1057.819716] env[65503]: DEBUG nova.scheduler.client.report [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1057.837246] env[65503]: DEBUG nova.scheduler.client.report [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1057.837545] env[65503]: DEBUG nova.compute.provider_tree [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1057.853278] env[65503]: DEBUG nova.scheduler.client.report [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 
tempest-ServerDiskConfigTestJSON-898978153-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1057.880036] env[65503]: DEBUG nova.scheduler.client.report [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1057.948136] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073781} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.948477] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1057.949309] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cc2747-9698-447b-8d2d-ec528c03e5b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.981294] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] f71dca10-3b68-4f1e-868e-a8c8271f7c88/f71dca10-3b68-4f1e-868e-a8c8271f7c88.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.985392] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92d63bfb-2127-4e55-b06b-ca82949df09e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.008169] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1058.008169] env[65503]: value = "task-4450605" [ 1058.008169] env[65503]: _type = "Task" [ 1058.008169] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.018919] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450605, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.237832] env[65503]: DEBUG nova.compute.manager [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1058.281217] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450604, 'name': PowerOffVM_Task, 'duration_secs': 0.382567} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.282527] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.283642] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491de9cf-c5af-4226-b741-fc65513e6a7e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.287063] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d82203c-2ca3-4517-a589-b2e594f7cb09 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.313538] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31fd171-dc08-4a6d-91f4-53aab8ec29cf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.319240] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13a6f60-b4ae-42a4-adc5-c2e7bc755cea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.362626] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c71ec9c-0778-4144-bbe0-2cc8c3b4f203 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.373259] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b9efd0-95c8-4013-80f4-47aedbc733e7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.390413] env[65503]: DEBUG nova.compute.provider_tree [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1058.520193] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450605, 'name': ReconfigVM_Task, 'duration_secs': 0.376013} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.520509] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Reconfigured VM instance instance-00000063 to attach disk [datastore2] f71dca10-3b68-4f1e-868e-a8c8271f7c88/f71dca10-3b68-4f1e-868e-a8c8271f7c88.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1058.521184] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be80ba8e-89c0-4d59-9a6f-f39164d72127 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.530906] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1058.530906] env[65503]: value = "task-4450606" [ 1058.530906] env[65503]: _type = "Task" [ 1058.530906] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.541045] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450606, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.765604] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.770072] env[65503]: WARNING neutronclient.v2_0.client [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
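The entries for instance f71dca10-3b68-4f1e-868e-a8c8271f7c88 across this stretch show the order of vCenter operations during a spawn from the image cache: CopyVirtualDisk_Task out of devstack-image-cache_base, ExtendVirtualDisk_Task on the root disk, ReconfigVM_Task to attach the copied VMDK, Rename_Task, then PowerOnVM_Task. A condensed sketch of that sequence using the same invoke_api/wait_for_task pattern; the helper and parameter names are assumptions, only the task names match the log.

```python
# Condensed sketch of the spawn sequence logged for f71dca10-...:
# copy cached VMDK -> extend root disk -> attach via reconfigure -> power on.
# build_disk_attach_spec() is a hypothetical helper; the vSphere task names
# are the ones appearing in the entries above.

def spawn_from_cached_image(session, vm_ref, disk_mgr, dc_ref, cache_path,
                            dst_path, root_size_kb, build_disk_attach_spec):
    # 1. Copy the cached image VMDK into the instance directory.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=cache_path, sourceDatacenter=dc_ref,
                              destName=dst_path, destDatacenter=dc_ref)
    session.wait_for_task(task)

    # 2. Grow the copied root disk to the flavor size (vCenter takes KB;
    #    the log's "Extending root virtual disk to 1048576" is 1 GB).
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=dst_path, datacenter=dc_ref,
                              newCapacityKb=root_size_kb, eagerZero=False)
    session.wait_for_task(task)

    # 3. Attach the disk to the VM with a reconfigure.
    #    (The log also shows a Rename_Task here, giving the VM its name.)
    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=build_disk_attach_spec(dst_path))
    session.wait_for_task(task)

    # 4. Power the instance on; oslo.vmware polls the task as logged above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
```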
[ 1058.816895] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.817118] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.817309] env[65503]: DEBUG nova.network.neutron [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1058.833308] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1058.833962] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ca4b76fd-da16-4d1c-8d32-71e95355d0d3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.842966] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1058.842966] env[65503]: value = "task-4450607" [ 1058.842966] env[65503]: _type = "Task" [ 1058.842966] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.852031] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450607, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.862663] env[65503]: DEBUG oslo_vmware.rw_handles [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd526-4079-2163-2727-b1ddba4fc29a/disk-0.vmdk. 
{{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1058.863645] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5142fa6-bd7d-47ad-b955-95b35b6769da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.870891] env[65503]: DEBUG oslo_vmware.rw_handles [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd526-4079-2163-2727-b1ddba4fc29a/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1058.871069] env[65503]: ERROR oslo_vmware.rw_handles [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd526-4079-2163-2727-b1ddba4fc29a/disk-0.vmdk due to incomplete transfer. [ 1058.871304] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5fa3e665-0e08-4dbb-bf23-b6d6267cbeed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.879370] env[65503]: DEBUG oslo_vmware.rw_handles [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd526-4079-2163-2727-b1ddba4fc29a/disk-0.vmdk. {{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1058.879560] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Uploaded image 5b2e951c-3e9a-4d3c-b99d-569f5b89e872 to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1058.882392] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1058.882651] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-03602510-da91-4690-bfb9-ffd33deb1e3b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.889653] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1058.889653] env[65503]: value = "task-4450608" [ 1058.889653] env[65503]: _type = "Task" [ 1058.889653] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.901801] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450608, 'name': Destroy_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.924142] env[65503]: DEBUG nova.scheduler.client.report [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 133 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1058.924495] env[65503]: DEBUG nova.compute.provider_tree [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 133 to 134 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1058.924696] env[65503]: DEBUG nova.compute.provider_tree [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1059.042409] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450606, 'name': Rename_Task, 'duration_secs': 0.147514} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.042693] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1059.042965] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47b0dbef-0412-4086-bacf-055891056e2d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.050650] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1059.050650] env[65503]: value = "task-4450609" [ 1059.050650] env[65503]: _type = "Task" [ 1059.050650] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.063798] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450609, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.321582] env[65503]: WARNING neutronclient.v2_0.client [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1059.322508] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1059.322882] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1059.354776] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450607, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.400941] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450608, 'name': Destroy_Task} progress is 33%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.430292] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.006s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.432723] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.988s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.432847] env[65503]: DEBUG nova.objects.instance [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lazy-loading 'resources' on Instance uuid f50dce3d-4ef2-46e3-9044-c53b55ddbefb {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.445774] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1059.446473] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1059.462647] env[65503]: INFO nova.scheduler.client.report [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Deleted allocations for instance bcc91c22-5f92-4233-a293-54f2790a750b [ 1059.522187] env[65503]: WARNING neutronclient.v2_0.client [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
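The earlier inventory-update failure against provider 988ff85a-1d12-41bb-a369-e298e8491ca1 (the 409 with code "placement.concurrent_update"), followed by the inventory refresh and the successful update that bumps the generation from 133 to 134, reflects Placement's optimistic concurrency: every PUT of inventories must carry the provider generation last seen, and a conflict means re-fetch and retry. A minimal sketch of that contract using plain requests; the endpoint, token handling, and retry policy are illustrative assumptions, not Nova's report client.

```python
# Hedged sketch of Placement's generation-checked inventory update behind the
# 409 "placement.concurrent_update" / refresh / retry entries above.
# PLACEMENT and the token are placeholders; Nova's real client goes through
# keystoneauth and handles microversions and retries itself.
import requests

PLACEMENT = 'http://placement.example.test/placement'   # placeholder
HEADERS = {'X-Auth-Token': 'TOKEN',
           'OpenStack-API-Version': 'placement 1.26'}

def set_inventory(provider_uuid, inventories, max_attempts=3):
    url = f'{PLACEMENT}/resource_providers/{provider_uuid}/inventories'
    for _ in range(max_attempts):
        # Refresh the provider generation before each attempt.
        generation = requests.get(url, headers=HEADERS).json()[
            'resource_provider_generation']
        resp = requests.put(url, headers=HEADERS, json={
            'resource_provider_generation': generation,
            'inventories': inventories,
        })
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation; loop to refresh and retry, as the log shows.
    raise RuntimeError('gave up after repeated generation conflicts')
```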
[ 1059.523023] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1059.523424] env[65503]: WARNING openstack [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1059.562496] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450609, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.624646] env[65503]: DEBUG nova.network.neutron [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance_info_cache with network_info: [{"id": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "address": "fa:16:3e:b6:80:b2", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309ef3a-1e", "ovs_interfaceid": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1059.855648] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450607, 'name': CreateSnapshot_Task, 'duration_secs': 0.965395} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.855648] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1059.856191] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9c4969-84bc-4532-818d-45e5c7cae95d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.900610] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450608, 'name': Destroy_Task, 'duration_secs': 0.741406} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.900883] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Destroyed the VM [ 1059.901132] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1059.901393] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-80b1ec89-20d2-485b-8629-7671aeb1d0c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.909795] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1059.909795] env[65503]: value = "task-4450610" [ 1059.909795] env[65503]: _type = "Task" [ 1059.909795] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.919525] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450610, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.967377] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1059.967651] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870460', 'volume_id': '4ade83db-2b28-4173-9346-757fd8acf7f7', 'name': 'volume-4ade83db-2b28-4173-9346-757fd8acf7f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fcdcabb9-f076-4fa9-ac30-3220eb6064da', 'attached_at': '', 'detached_at': '', 'volume_id': '4ade83db-2b28-4173-9346-757fd8acf7f7', 'serial': '4ade83db-2b28-4173-9346-757fd8acf7f7'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1059.968587] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0225c656-4ade-472c-9f93-3ad5ffea9386 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.976731] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b2be95dd-0265-4d39-98cd-a11c428f9f8f tempest-ServerDiskConfigTestJSON-898978153 tempest-ServerDiskConfigTestJSON-898978153-project-member] Lock "bcc91c22-5f92-4233-a293-54f2790a750b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.374s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.992604] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4908e08c-6ece-4891-b423-45fa2eaaa4dd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.024423] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] volume-4ade83db-2b28-4173-9346-757fd8acf7f7/volume-4ade83db-2b28-4173-9346-757fd8acf7f7.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1060.027899] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49955302-31be-49f2-ad8a-ba1661c70452 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.048762] env[65503]: DEBUG oslo_vmware.api [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1060.048762] env[65503]: value = "task-4450611" [ 1060.048762] env[65503]: _type = "Task" [ 1060.048762] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.063742] env[65503]: DEBUG oslo_vmware.api [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450611, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.069750] env[65503]: DEBUG oslo_vmware.api [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450609, 'name': PowerOnVM_Task, 'duration_secs': 0.673376} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.070213] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1060.070503] env[65503]: INFO nova.compute.manager [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Took 9.19 seconds to spawn the instance on the hypervisor. [ 1060.070784] env[65503]: DEBUG nova.compute.manager [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1060.071887] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05beb156-2846-4311-8ca8-162af35d7181 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.128680] env[65503]: DEBUG oslo_concurrency.lockutils [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.217667] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8099c3-0597-47aa-aecd-c6da7d9e7d12 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.227438] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58d8529-20c6-4740-aabb-d345b33ec889 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.259637] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e3f01a-638f-4d11-b2e8-2b22e88d4a0d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.268873] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911c8aa5-01d0-46ef-91c9-3dba4af2d232 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.284045] env[65503]: DEBUG nova.compute.provider_tree [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Inventory has not changed in ProviderTree for provider: 
988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.375379] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1060.375733] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b16b3864-8d3d-4b19-ad03-8204f96ff49d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.388024] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1060.388024] env[65503]: value = "task-4450612" [ 1060.388024] env[65503]: _type = "Task" [ 1060.388024] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.396898] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450612, 'name': CloneVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.420983] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450610, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.526688] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "7802baf8-04ff-4df4-90b0-71cb97dddc83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.527034] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "7802baf8-04ff-4df4-90b0-71cb97dddc83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.560677] env[65503]: DEBUG oslo_vmware.api [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450611, 'name': ReconfigVM_Task, 'duration_secs': 0.475772} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.560959] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Reconfigured VM instance instance-00000058 to attach disk [datastore2] volume-4ade83db-2b28-4173-9346-757fd8acf7f7/volume-4ade83db-2b28-4173-9346-757fd8acf7f7.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1060.566692] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca568bc9-ed59-4fa6-a2fc-73dafed05024 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.585840] env[65503]: DEBUG oslo_vmware.api [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1060.585840] env[65503]: value = "task-4450613" [ 1060.585840] env[65503]: _type = "Task" [ 1060.585840] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.601757] env[65503]: DEBUG oslo_vmware.api [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450613, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.602636] env[65503]: INFO nova.compute.manager [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Took 18.96 seconds to build instance. 
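(Editor's note: the records above show the VMware driver attaching a Cinder VMDK volume by issuing VirtualMachine.ReconfigVM_Task and then polling the task until it completes, the same invoke-then-wait pattern used for the RemoveSnapshot_Task and CloneVM_Task entries nearby. The sketch below shows that pattern with oslo.vmware called directly; the vCenter host, credentials, and managed-object IDs are placeholders, and the full attach spec that Nova's volumeops builds (disk backing, controller key, unit number) is deliberately elided.)

# Minimal sketch of the "invoke task, then poll" pattern visible in the log
# (ReconfigVM_Task + wait_for_task). Host, credentials and moref values are
# placeholders; the real attach spec built by Nova's volumeops is elided.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc1.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

# Managed object reference of the instance's VM (normally obtained from a
# PropertyCollector query; hard-coded here for illustration only).
vm_ref = vim_util.get_moref('vm-870460', 'VirtualMachine')

# An (empty) reconfigure spec; a volume attach would add a
# VirtualDeviceConfigSpec carrying a VirtualDisk with a flat-ver2 backing.
spec = session.vim.client.factory.create('ns0:VirtualMachineConfigSpec')

task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
# wait_for_task polls TaskInfo until success or error, emitting progress
# lines like the _poll_task entries above, and returns the task result.
session.wait_for_task(task)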
[ 1060.665141] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0480032b-3de9-4dc8-b181-1e297c6edf98 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.687659] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d141eb8a-7f49-4449-b60a-ed622f577ed8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.696241] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance 'bee97942-afb2-465f-9774-56f5aa8becca' progress to 83 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1060.787342] env[65503]: DEBUG nova.scheduler.client.report [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1060.901669] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450612, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.922717] env[65503]: DEBUG oslo_vmware.api [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450610, 'name': RemoveSnapshot_Task, 'duration_secs': 0.841013} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.923149] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1060.923348] env[65503]: INFO nova.compute.manager [None req-b7c2888d-ed22-412d-abac-28b1b2d5f900 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Took 16.42 seconds to snapshot the instance on the hypervisor. [ 1061.030726] env[65503]: DEBUG nova.compute.manager [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1061.098908] env[65503]: DEBUG oslo_vmware.api [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450613, 'name': ReconfigVM_Task, 'duration_secs': 0.159243} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.099242] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870460', 'volume_id': '4ade83db-2b28-4173-9346-757fd8acf7f7', 'name': 'volume-4ade83db-2b28-4173-9346-757fd8acf7f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fcdcabb9-f076-4fa9-ac30-3220eb6064da', 'attached_at': '', 'detached_at': '', 'volume_id': '4ade83db-2b28-4173-9346-757fd8acf7f7', 'serial': '4ade83db-2b28-4173-9346-757fd8acf7f7'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1061.105655] env[65503]: DEBUG oslo_concurrency.lockutils [None req-719d6cc1-416c-4f02-9d75-131e56740b5a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.480s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.203588] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1061.204020] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67a49a1b-c101-4c62-803c-1125062d7d51 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.213813] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1061.213813] env[65503]: value = "task-4450614" [ 1061.213813] env[65503]: _type = "Task" [ 1061.213813] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.224145] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450614, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.293084] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.860s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.296267] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.531s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.298932] env[65503]: INFO nova.compute.claims [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1061.349665] env[65503]: INFO nova.scheduler.client.report [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Deleted allocations for instance f50dce3d-4ef2-46e3-9044-c53b55ddbefb [ 1061.406503] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450612, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.564701] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.565833] env[65503]: DEBUG nova.compute.manager [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Received event network-changed-b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1061.566032] env[65503]: DEBUG nova.compute.manager [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing instance network info cache due to event network-changed-b5a654cb-c44f-45fc-bf0c-429ff06916f0. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1061.566247] env[65503]: DEBUG oslo_concurrency.lockutils [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] Acquiring lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.566384] env[65503]: DEBUG oslo_concurrency.lockutils [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] Acquired lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.566538] env[65503]: DEBUG nova.network.neutron [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing network info cache for port b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1061.728102] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450614, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.860854] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a7820e17-585e-4ce2-abf5-087fe3ebb8c4 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "f50dce3d-4ef2-46e3-9044-c53b55ddbefb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.894s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.900822] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450612, 'name': CloneVM_Task, 'duration_secs': 1.393186} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.901149] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Created linked-clone VM from snapshot [ 1061.902378] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da8a31b-0197-4649-a649-d0197f1dd7b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.912293] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Uploading image 4fad5934-e42b-4e2d-849e-59f48c65fe4b {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1061.942375] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1061.942375] env[65503]: value = "vm-870462" [ 1061.942375] env[65503]: _type = "VirtualMachine" [ 1061.942375] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1061.942714] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-fe089a9a-cdb1-4ac0-a398-db15e494bbc7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.951186] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lease: (returnval){ [ 1061.951186] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa002e-3d19-d434-c120-d26630b25773" [ 1061.951186] env[65503]: _type = "HttpNfcLease" [ 1061.951186] env[65503]: } obtained for exporting VM: (result){ [ 1061.951186] env[65503]: value = "vm-870462" [ 1061.951186] env[65503]: _type = "VirtualMachine" [ 1061.951186] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1061.951186] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the lease: (returnval){ [ 1061.951186] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa002e-3d19-d434-c120-d26630b25773" [ 1061.951186] env[65503]: _type = "HttpNfcLease" [ 1061.951186] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1061.959194] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1061.959194] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa002e-3d19-d434-c120-d26630b25773" [ 1061.959194] env[65503]: _type = "HttpNfcLease" [ 1061.959194] env[65503]: } is initializing. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1062.070765] env[65503]: WARNING neutronclient.v2_0.client [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1062.071510] env[65503]: WARNING openstack [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1062.071929] env[65503]: WARNING openstack [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1062.142498] env[65503]: DEBUG nova.objects.instance [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lazy-loading 'flavor' on Instance uuid fcdcabb9-f076-4fa9-ac30-3220eb6064da {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1062.225373] env[65503]: DEBUG oslo_vmware.api [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450614, 'name': PowerOnVM_Task, 'duration_secs': 0.703001} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.225713] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1062.225906] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-00e34148-237a-4328-b481-114143c945f4 tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance 'bee97942-afb2-465f-9774-56f5aa8becca' progress to 100 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1062.463023] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1062.463023] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa002e-3d19-d434-c120-d26630b25773" [ 1062.463023] env[65503]: _type = "HttpNfcLease" [ 1062.463023] env[65503]: } is ready. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1062.464517] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1062.464517] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fa002e-3d19-d434-c120-d26630b25773" [ 1062.464517] env[65503]: _type = "HttpNfcLease" [ 1062.464517] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1062.465698] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3115f802-18f1-4e93-b521-f646bceff75d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.476197] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528925fa-8c4e-d8da-3e42-d7baec24f834/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1062.476925] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528925fa-8c4e-d8da-3e42-d7baec24f834/disk-0.vmdk for reading. {{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1062.581336] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fdd9c16a-be4e-428a-bea6-268baf4d4e47 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.647272] env[65503]: DEBUG oslo_concurrency.lockutils [None req-89877467-681e-418a-8267-f2af486f2aff tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.338s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.673478] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b07014-0ba0-42c3-be74-79bde2a0a65a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.684280] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe439883-211e-4c80-b998-0ad3685e5cec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.723168] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f64a62f-711c-44b1-989d-20940df035d0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.732771] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-29cb827c-570d-4c82-97f2-011f303345c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.757280] env[65503]: DEBUG nova.compute.provider_tree [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1062.959136] env[65503]: WARNING openstack [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1062.959136] env[65503]: WARNING openstack [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1063.180590] env[65503]: WARNING neutronclient.v2_0.client [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1063.181713] env[65503]: WARNING openstack [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1063.182480] env[65503]: WARNING openstack [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1063.263541] env[65503]: DEBUG nova.scheduler.client.report [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1063.449124] env[65503]: DEBUG nova.network.neutron [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updated VIF entry in instance network info cache for port 
b5a654cb-c44f-45fc-bf0c-429ff06916f0. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1063.449124] env[65503]: DEBUG nova.network.neutron [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updating instance_info_cache with network_info: [{"id": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "address": "fa:16:3e:2b:7c:50", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a654cb-c4", "ovs_interfaceid": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1063.664543] env[65503]: DEBUG nova.compute.manager [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Received event network-changed-aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1063.665364] env[65503]: DEBUG nova.compute.manager [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Refreshing instance network info cache due to event network-changed-aba660fb-0b53-43b7-a795-9ebaa9dd3097. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1063.665843] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Acquiring lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.665843] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Acquired lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.666527] env[65503]: DEBUG nova.network.neutron [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Refreshing network info cache for port aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1063.773227] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.773933] env[65503]: DEBUG nova.compute.manager [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1063.777939] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.213s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.779202] env[65503]: INFO nova.compute.claims [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1063.952454] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.952937] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.955504] env[65503]: DEBUG oslo_concurrency.lockutils [req-d185be88-4efc-433a-8085-fdcde3bd18b7 req-d7887552-d414-48f9-87fa-5f43cfd4949e service nova] Releasing lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.170104] env[65503]: WARNING neutronclient.v2_0.client [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
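(Editor's note: the "Inventory has not changed for provider ..." records report the resource-provider inventory that the claims above, e.g. "Claim successful on node domain-c8...", are checked against. As a rough illustration of how that inventory translates into allocatable capacity, the sketch below applies the usual Placement formula, capacity = (total - reserved) * allocation_ratio, to the VCPU/MEMORY_MB/DISK_GB figures taken from the log; it is a standalone calculation, not Nova code.)

# Rough illustration: allocatable capacity per resource class for the
# inventory reported in the log, using (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} allocatable")
# VCPU: 192 allocatable, MEMORY_MB: 196078 allocatable, DISK_GB: 200 allocatable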
[ 1064.171144] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1064.171451] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1064.285640] env[65503]: DEBUG nova.compute.utils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1064.291450] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1064.292740] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1064.299611] env[65503]: DEBUG nova.compute.manager [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1064.300275] env[65503]: DEBUG nova.network.neutron [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1064.300378] env[65503]: WARNING neutronclient.v2_0.client [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1064.301095] env[65503]: WARNING neutronclient.v2_0.client [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
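(Editor's note: the many "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" records in this window (the per-instance build locks, "compute_resources", the refresh_cache-* locks) are emitted by oslo.concurrency's synchronized decorator. Below is a generic sketch of that pattern using lockutils directly rather than Nova's internal wrappers; the lock names and the guarded function are made up for illustration.)

# Generic sketch of the lock pattern behind the "Acquiring lock ... /
# acquired ... / released ..." log lines, using oslo.concurrency directly.
# The lock names and the guarded function are illustrative only.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Only one thread at a time runs this body per lock name; the decorator
    # logs how long the caller waited for the lock and how long it was held.
    print(f"claiming resources for {instance_uuid}")

# Equivalent context-manager form, as used around the network info cache:
with lockutils.lock('refresh_cache-%s' % 'some-instance-uuid'):
    pass  # refresh the cache while holding the lock

claim_resources('fcdcabb9-f076-4fa9-ac30-3220eb6064da')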
[ 1064.302259] env[65503]: WARNING openstack [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1064.302969] env[65503]: WARNING openstack [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1064.375393] env[65503]: DEBUG nova.policy [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d16022d9f8b43ba8e97191fdc2b1a8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3658921b747e4d78a2046b838cb36d26', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1064.433196] env[65503]: WARNING neutronclient.v2_0.client [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
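(Editor's note: earlier in this window the snapshot upload path created an HttpNfcLease for vm-870462, waited for the lease to become ready, and then opened the advertised disk-0.vmdk URL for reading. The sketch below shows only the generic "stream the lease URL" step using requests; it is not oslo.vmware's rw_handles implementation (which also sends periodic HttpNfcLeaseProgress keep-alives, as the log shows), and the URL, session cookie, and chunk size are placeholders.)

# Generic sketch of reading an exported disk over the HttpNfcLease URL seen
# in the log. This is NOT oslo.vmware's rw_handles code; URL, session cookie
# and chunk size are placeholders, and lease progress keep-alives are omitted.
import requests

VMDK_URL = 'https://esx.example.test/nfc/EXAMPLE-LEASE-ID/disk-0.vmdk'
SOAP_COOKIE = 'vmware_soap_session="EXAMPLE"'  # taken from the vCenter session

with requests.get(VMDK_URL,
                  headers={'Cookie': SOAP_COOKIE},
                  stream=True,
                  verify=False,   # lab setup; verify certificates in production
                  timeout=60) as resp:
    resp.raise_for_status()
    transferred = 0
    with open('disk-0.vmdk', 'wb') as out:
        for chunk in resp.iter_content(chunk_size=1024 * 1024):
            out.write(chunk)
            transferred += len(chunk)
    print(f"downloaded {transferred} bytes")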
[ 1064.434776] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1064.435659] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1064.457617] env[65503]: INFO nova.compute.manager [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Detaching volume 4ade83db-2b28-4173-9346-757fd8acf7f7 [ 1064.508645] env[65503]: INFO nova.virt.block_device [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Attempting to driver detach volume 4ade83db-2b28-4173-9346-757fd8acf7f7 from mountpoint /dev/sdb [ 1064.508868] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Volume detach. Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1064.512488] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870460', 'volume_id': '4ade83db-2b28-4173-9346-757fd8acf7f7', 'name': 'volume-4ade83db-2b28-4173-9346-757fd8acf7f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fcdcabb9-f076-4fa9-ac30-3220eb6064da', 'attached_at': '', 'detached_at': '', 'volume_id': '4ade83db-2b28-4173-9346-757fd8acf7f7', 'serial': '4ade83db-2b28-4173-9346-757fd8acf7f7'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1064.513517] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56666f08-ce11-4478-b766-be6e8956c40a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.550832] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5dbc861-270d-41bc-8988-e51b78c6126e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.563592] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7fc69c-1b39-456b-9ddf-1b759240b0ab {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.568158] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "96d8f433-9b86-422f-88ef-99836fb21f30" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.568448] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "96d8f433-9b86-422f-88ef-99836fb21f30" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.568673] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "96d8f433-9b86-422f-88ef-99836fb21f30-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.568871] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "96d8f433-9b86-422f-88ef-99836fb21f30-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.569046] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "96d8f433-9b86-422f-88ef-99836fb21f30-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.592817] env[65503]: INFO nova.compute.manager [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Terminating instance [ 1064.608949] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2272de80-2e02-476e-b967-cd09084acb9b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.617234] env[65503]: DEBUG nova.compute.manager [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1064.617571] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.619373] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43308634-d803-4ded-8097-aeeb98a2f7db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.638465] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The volume has not been displaced from its original location: [datastore2] volume-4ade83db-2b28-4173-9346-757fd8acf7f7/volume-4ade83db-2b28-4173-9346-757fd8acf7f7.vmdk. No consolidation needed. {{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1064.644261] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Reconfiguring VM instance instance-00000058 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1064.649943] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6074841-aae5-483f-b2f9-9e29c6cd72fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.663743] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.668215] env[65503]: DEBUG nova.network.neutron [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updated VIF entry in instance network info cache for port aba660fb-0b53-43b7-a795-9ebaa9dd3097. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1064.668215] env[65503]: DEBUG nova.network.neutron [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updating instance_info_cache with network_info: [{"id": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "address": "fa:16:3e:b5:17:21", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaba660fb-0b", "ovs_interfaceid": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1064.672022] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6fbc1aee-6dbb-4076-8653-c92de892e793 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.680693] env[65503]: DEBUG oslo_vmware.api [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1064.680693] env[65503]: value = "task-4450616" [ 1064.680693] env[65503]: _type = "Task" [ 1064.680693] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.683342] env[65503]: DEBUG oslo_vmware.api [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1064.683342] env[65503]: value = "task-4450617" [ 1064.683342] env[65503]: _type = "Task" [ 1064.683342] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.708588] env[65503]: DEBUG oslo_vmware.api [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450616, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.709290] env[65503]: DEBUG oslo_vmware.api [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450617, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.718176] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed17e80-b291-4263-a0da-b4790ce4add5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.729092] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9138556e-05b4-466d-8be8-0075e604d3ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.768273] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9281623-b7ee-4253-aa5c-7396e8cf77bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.779219] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ebc995-05c5-4a93-9fd5-ee1452f9fb74 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.797245] env[65503]: DEBUG nova.compute.provider_tree [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.800021] env[65503]: DEBUG nova.compute.manager [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1064.863539] env[65503]: DEBUG nova.network.neutron [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Successfully created port: f8cdf0ee-e818-41cd-b8aa-b485ea737879 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1065.173496] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Releasing lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.173846] env[65503]: DEBUG nova.compute.manager [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Received event network-changed-aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1065.174047] env[65503]: DEBUG nova.compute.manager [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Refreshing instance network info cache due to event network-changed-aba660fb-0b53-43b7-a795-9ebaa9dd3097. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1065.174291] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Acquiring lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.174436] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Acquired lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.174669] env[65503]: DEBUG nova.network.neutron [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Refreshing network info cache for port aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1065.195884] env[65503]: DEBUG oslo_vmware.api [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450616, 'name': PowerOffVM_Task, 'duration_secs': 0.340153} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.196991] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1065.197143] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1065.197432] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21410de7-3dae-4f04-b62a-b956a2e9b6c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.203796] env[65503]: DEBUG oslo_vmware.api [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450617, 'name': ReconfigVM_Task, 'duration_secs': 0.365362} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.205307] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Reconfigured VM instance instance-00000058 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1065.210379] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0ee02b1-99f9-4dc5-99fe-6394fed128e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.229589] env[65503]: DEBUG oslo_vmware.api [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1065.229589] env[65503]: value = "task-4450619" [ 1065.229589] env[65503]: _type = "Task" [ 1065.229589] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.242999] env[65503]: DEBUG oslo_vmware.api [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450619, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.281339] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1065.281821] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1065.281939] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Deleting the datastore file [datastore1] 96d8f433-9b86-422f-88ef-99836fb21f30 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1065.282570] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d71ecae-f6fd-423d-b940-9395567f011f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.291873] env[65503]: DEBUG oslo_vmware.api [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for the task: (returnval){ [ 1065.291873] env[65503]: value = "task-4450620" [ 1065.291873] env[65503]: _type = "Task" [ 1065.291873] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.300592] env[65503]: DEBUG nova.scheduler.client.report [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1065.310654] env[65503]: DEBUG oslo_vmware.api [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450620, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.368416] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "bee97942-afb2-465f-9774-56f5aa8becca" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.368822] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "bee97942-afb2-465f-9774-56f5aa8becca" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.369828] env[65503]: DEBUG nova.compute.manager [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Going to confirm migration 6 {{(pid=65503) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 1065.677867] env[65503]: WARNING neutronclient.v2_0.client [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
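Editor's note (not part of the log): the inventory reported above for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 can be read with the usual Placement capacity rule, where each resource class serves at most (total - reserved) * allocation_ratio units and max_unit caps any single allocation. A minimal arithmetic sketch, illustrative only and not Nova source, using the exact figures from this log:

```python
# Arithmetic sketch only (not Nova source): the Placement capacity rule
# capacity = (total - reserved) * allocation_ratio, applied to the inventory
# logged above for provider 988ff85a-1d12-41bb-a369-e298e8491ca1.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 95},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} consumable units, <= {inv['max_unit']} per allocation")
# VCPU: 192 consumable units, <= 16 per allocation
# MEMORY_MB: 196078 consumable units, <= 65530 per allocation
# DISK_GB: 200 consumable units, <= 95 per allocation
```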
[ 1065.678568] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1065.678967] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1065.741222] env[65503]: DEBUG oslo_vmware.api [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450619, 'name': ReconfigVM_Task, 'duration_secs': 0.179124} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.741585] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870460', 'volume_id': '4ade83db-2b28-4173-9346-757fd8acf7f7', 'name': 'volume-4ade83db-2b28-4173-9346-757fd8acf7f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fcdcabb9-f076-4fa9-ac30-3220eb6064da', 'attached_at': '', 'detached_at': '', 'volume_id': '4ade83db-2b28-4173-9346-757fd8acf7f7', 'serial': '4ade83db-2b28-4173-9346-757fd8acf7f7'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1065.781809] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1065.782174] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1065.802733] env[65503]: DEBUG oslo_vmware.api [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Task: {'id': task-4450620, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208898} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.803063] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.803291] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.803495] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.803754] env[65503]: INFO nova.compute.manager [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1065.803963] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1065.804213] env[65503]: DEBUG nova.compute.manager [-] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1065.804328] env[65503]: DEBUG nova.network.neutron [-] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1065.804609] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1065.805241] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1065.805507] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1065.813667] env[65503]: DEBUG nova.compute.manager [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1065.816575] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.039s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.817145] env[65503]: DEBUG nova.compute.manager [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1065.848292] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1065.853049] env[65503]: DEBUG nova.virt.hardware [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1065.853451] env[65503]: DEBUG nova.virt.hardware [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1065.853711] env[65503]: DEBUG nova.virt.hardware [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1065.853960] env[65503]: DEBUG nova.virt.hardware [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1065.854135] env[65503]: DEBUG nova.virt.hardware [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1065.854327] env[65503]: DEBUG nova.virt.hardware [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 
tempest-ServerActionsTestOtherB-577581098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1065.854632] env[65503]: DEBUG nova.virt.hardware [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1065.854802] env[65503]: DEBUG nova.virt.hardware [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1065.855026] env[65503]: DEBUG nova.virt.hardware [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1065.855220] env[65503]: DEBUG nova.virt.hardware [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1065.855396] env[65503]: DEBUG nova.virt.hardware [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1065.856414] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac3c69b-7b1e-4d27-aba6-0707479ad95c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.866808] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c5af19-4a0d-4d4b-806f-95d6d19e7bc2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.885548] env[65503]: WARNING neutronclient.v2_0.client [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
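Editor's note (not part of the log): the PowerOffVM_Task, ReconfigVM_Task and DeleteDatastoreFile_Task sequences above all follow the same oslo.vmware pattern -- invoke the asynchronous vSphere method through the session, then block on wait_for_task while the API layer polls progress. A minimal sketch under the assumption of a reachable vCenter; the host, credentials and managed-object ID below are placeholders, not values from this log:

```python
# Illustrative sketch, not Nova code. Placeholder vCenter endpoint and
# credentials; replace them before running against a real environment.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

# Placeholder managed object reference for a VM.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Kick off the asynchronous operation, then wait for vCenter to report the
# task as complete -- the same invoke/poll pairing seen in the log above.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)
```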
[ 1065.886251] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1065.886609] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1065.895622] env[65503]: WARNING neutronclient.v2_0.client [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1066.143183] env[65503]: WARNING neutronclient.v2_0.client [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1066.143618] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.143770] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquired lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.143937] env[65503]: DEBUG nova.network.neutron [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1066.144126] env[65503]: DEBUG nova.objects.instance [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lazy-loading 'info_cache' on Instance uuid bee97942-afb2-465f-9774-56f5aa8becca {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.209263] env[65503]: DEBUG nova.network.neutron [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updated VIF entry in instance network info cache for port aba660fb-0b53-43b7-a795-9ebaa9dd3097. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1066.209671] env[65503]: DEBUG nova.network.neutron [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updating instance_info_cache with network_info: [{"id": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "address": "fa:16:3e:b5:17:21", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaba660fb-0b", "ovs_interfaceid": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1066.292399] env[65503]: DEBUG nova.objects.instance [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lazy-loading 'flavor' on Instance uuid fcdcabb9-f076-4fa9-ac30-3220eb6064da {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.297489] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.297731] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.326958] env[65503]: DEBUG nova.compute.utils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1066.327857] env[65503]: DEBUG nova.compute.manager [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1066.328031] env[65503]: DEBUG nova.network.neutron [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1066.328452] env[65503]: WARNING neutronclient.v2_0.client [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1066.329023] env[65503]: WARNING neutronclient.v2_0.client [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1066.329948] env[65503]: WARNING openstack [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1066.329948] env[65503]: WARNING openstack [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1066.385479] env[65503]: DEBUG nova.policy [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e97b5208de384c19bbc0e332b67fc4ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c5b0c3771b5434992cd58e1af539bde', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1066.466933] env[65503]: DEBUG nova.network.neutron [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Successfully updated port: f8cdf0ee-e818-41cd-b8aa-b485ea737879 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1066.572330] env[65503]: DEBUG nova.network.neutron [-] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1066.674797] env[65503]: DEBUG nova.network.neutron [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] 
[instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Successfully created port: 099eaaaa-f2b2-44dc-98c7-be6801506e99 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1066.713981] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Releasing lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1066.714650] env[65503]: DEBUG nova.compute.manager [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Received event network-changed-b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1066.716228] env[65503]: DEBUG nova.compute.manager [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing instance network info cache due to event network-changed-b5a654cb-c44f-45fc-bf0c-429ff06916f0. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1066.716228] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Acquiring lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.716228] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Acquired lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.716228] env[65503]: DEBUG nova.network.neutron [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing network info cache for port b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1066.800998] env[65503]: DEBUG nova.compute.utils [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1066.839014] env[65503]: DEBUG nova.compute.manager [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1066.969690] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.969970] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.970282] env[65503]: DEBUG nova.network.neutron [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1067.076049] env[65503]: INFO nova.compute.manager [-] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Took 1.27 seconds to deallocate network for instance. [ 1067.158066] env[65503]: WARNING neutronclient.v2_0.client [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1067.158066] env[65503]: WARNING openstack [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1067.158465] env[65503]: WARNING openstack [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1067.218896] env[65503]: WARNING neutronclient.v2_0.client [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
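Editor's note (not part of the log): the "Acquiring lock" / "acquired" / "released" triples above (compute_resources, refresh_cache-&lt;uuid&gt;, and the per-instance UUID locks) come from oslo.concurrency lockutils locks taken either as a decorator or as a context manager. A minimal sketch of both forms, with placeholder lock names patterned on the ones in this log:

```python
# Illustrative sketch, not Nova code: the two lockutils idioms behind the
# "Acquiring lock ... acquired ... released" lines in this log.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources', lock_file_prefix='nova-')
def update_usage():
    """Critical section: one caller at a time per lock name."""
    pass

# Context-manager form, as used for the refresh_cache-<uuid> locks; the UUID
# here is a placeholder.
with lockutils.lock('refresh_cache-00000000-0000-0000-0000-000000000000'):
    pass  # read or update the cached network info while holding the lock

update_usage()
```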
[ 1067.219916] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1067.220063] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1067.289281] env[65503]: WARNING openstack [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1067.289911] env[65503]: WARNING openstack [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1067.301363] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2589addf-8887-405b-8958-e3528bad09d2 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.348s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.304178] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.393440] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1067.393858] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1067.423134] env[65503]: WARNING 
neutronclient.v2_0.client [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1067.423866] env[65503]: WARNING openstack [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1067.424295] env[65503]: WARNING openstack [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1067.474259] env[65503]: WARNING openstack [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1067.474921] env[65503]: WARNING openstack [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1067.513716] env[65503]: WARNING neutronclient.v2_0.client [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
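Editor's note (not part of the log): the recurring "Disabling service ..." warnings above and below report that openstacksdk's config processing hit an oslo.config NoSuchOptError for a valid_interfaces option that is not registered under the [cinder] and [barbican] groups of this nova.conf. A small sketch, illustrative only and with a placeholder project name, of how that exception arises:

```python
# Illustrative sketch of the failure mode named in the warnings above:
# reading an option that was never registered raises NoSuchOptError.
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))
conf([], project='example')  # hypothetical project name, empty CLI args

try:
    conf.cinder.valid_interfaces   # never registered in this group
except cfg.NoSuchOptError as exc:
    print(exc)   # "no such option valid_interfaces in group [cinder]"
```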
[ 1067.514487] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1067.514959] env[65503]: WARNING openstack [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1067.542350] env[65503]: DEBUG nova.network.neutron [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1067.578895] env[65503]: DEBUG nova.network.neutron [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance_info_cache with network_info: [{"id": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "address": "fa:16:3e:b6:80:b2", "network": {"id": "0188a133-0246-4dcc-93e4-f4c6871df484", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1118761917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "463e93d05e1e4b27a3dc866a5b1991d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309ef3a-1e", "ovs_interfaceid": "7309ef3a-1ef4-4d05-a35b-8aecb1167266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1067.585193] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.585193] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.585193] env[65503]: DEBUG nova.objects.instance [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lazy-loading 'resources' on Instance uuid 96d8f433-9b86-422f-88ef-99836fb21f30 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.630244] env[65503]: WARNING openstack [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1067.630810] env[65503]: WARNING openstack [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1067.667458] env[65503]: DEBUG nova.network.neutron [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updated VIF entry in instance network info cache for port b5a654cb-c44f-45fc-bf0c-429ff06916f0. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1067.667854] env[65503]: DEBUG nova.network.neutron [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updating instance_info_cache with network_info: [{"id": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "address": "fa:16:3e:2b:7c:50", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a654cb-c4", "ovs_interfaceid": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1067.721135] env[65503]: WARNING neutronclient.v2_0.client [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 
tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1067.721932] env[65503]: WARNING openstack [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1067.722379] env[65503]: WARNING openstack [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1067.808847] env[65503]: DEBUG nova.network.neutron [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance_info_cache with network_info: [{"id": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "address": "fa:16:3e:57:76:3d", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cdf0ee-e8", "ovs_interfaceid": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1067.856316] env[65503]: DEBUG nova.compute.manager [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1067.887292] env[65503]: DEBUG nova.virt.hardware [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='6918124e5e0625c1fda68e304f1ee8b7',container_format='bare',created_at=2025-11-14T15:55:12Z,direct_url=,disk_format='vmdk',id=5b2e951c-3e9a-4d3c-b99d-569f5b89e872,min_disk=1,min_ram=0,name='tempest-test-snap-180006363',owner='8c5b0c3771b5434992cd58e1af539bde',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-11-14T15:55:28Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1067.888103] env[65503]: DEBUG nova.virt.hardware [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1067.888103] env[65503]: DEBUG nova.virt.hardware [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1067.888103] env[65503]: DEBUG nova.virt.hardware [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1067.888103] env[65503]: DEBUG nova.virt.hardware [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1067.888452] env[65503]: DEBUG nova.virt.hardware [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1067.888452] env[65503]: DEBUG nova.virt.hardware [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1067.888452] env[65503]: DEBUG nova.virt.hardware [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1067.888681] env[65503]: DEBUG nova.virt.hardware [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Got 1 
possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1067.888766] env[65503]: DEBUG nova.virt.hardware [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1067.889338] env[65503]: DEBUG nova.virt.hardware [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1067.890192] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d386a24-fa42-4182-ad36-5b6d632661da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.901640] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3dfcb2-22d5-44bf-a0d3-9fe92dfa75d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.912693] env[65503]: DEBUG nova.compute.manager [req-0bc13b2d-e5e2-41a8-8b96-55f55b8d98d9 req-888cf157-5cf3-4fa5-a3f0-a7904bbabb31 service nova] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Received event network-vif-deleted-4089f68d-952e-4b5f-8578-0ce2a0afb9f1 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1067.931324] env[65503]: DEBUG nova.compute.manager [req-ed89f689-c89c-4bf8-a270-ed7211262cf5 req-619ed781-79e3-4619-b56f-19ce52cc34d5 service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Received event network-vif-plugged-f8cdf0ee-e818-41cd-b8aa-b485ea737879 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1067.931549] env[65503]: DEBUG oslo_concurrency.lockutils [req-ed89f689-c89c-4bf8-a270-ed7211262cf5 req-619ed781-79e3-4619-b56f-19ce52cc34d5 service nova] Acquiring lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.931762] env[65503]: DEBUG oslo_concurrency.lockutils [req-ed89f689-c89c-4bf8-a270-ed7211262cf5 req-619ed781-79e3-4619-b56f-19ce52cc34d5 service nova] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.931928] env[65503]: DEBUG oslo_concurrency.lockutils [req-ed89f689-c89c-4bf8-a270-ed7211262cf5 req-619ed781-79e3-4619-b56f-19ce52cc34d5 service nova] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.932319] env[65503]: DEBUG nova.compute.manager [req-ed89f689-c89c-4bf8-a270-ed7211262cf5 req-619ed781-79e3-4619-b56f-19ce52cc34d5 service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] No waiting events found dispatching 
network-vif-plugged-f8cdf0ee-e818-41cd-b8aa-b485ea737879 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1067.932631] env[65503]: WARNING nova.compute.manager [req-ed89f689-c89c-4bf8-a270-ed7211262cf5 req-619ed781-79e3-4619-b56f-19ce52cc34d5 service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Received unexpected event network-vif-plugged-f8cdf0ee-e818-41cd-b8aa-b485ea737879 for instance with vm_state building and task_state spawning. [ 1068.085558] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Releasing lock "refresh_cache-bee97942-afb2-465f-9774-56f5aa8becca" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.085558] env[65503]: DEBUG nova.objects.instance [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lazy-loading 'migration_context' on Instance uuid bee97942-afb2-465f-9774-56f5aa8becca {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.170433] env[65503]: DEBUG oslo_concurrency.lockutils [req-4cfda9fd-6d83-4d8b-ab69-b5a8f3fd75c1 req-e29c1e8e-1248-4a26-89d0-1d0d88c892d3 service nova] Releasing lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.223896] env[65503]: DEBUG nova.network.neutron [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Successfully updated port: 099eaaaa-f2b2-44dc-98c7-be6801506e99 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1068.312370] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.312772] env[65503]: DEBUG nova.compute.manager [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Instance network_info: |[{"id": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "address": "fa:16:3e:57:76:3d", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cdf0ee-e8", 
"ovs_interfaceid": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1068.313295] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:76:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6966f473-59ac-49bb-9b7a-22c61f4e61e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8cdf0ee-e818-41cd-b8aa-b485ea737879', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1068.321207] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1068.323372] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1068.323526] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f599fb-e432-43b4-bf05-3ab0d9873052 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.327090] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95ada43a-56c0-46f7-a2ea-cff00d7c02ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.349692] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786af030-1154-400f-a6df-62d3ee8a54c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.354425] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1068.354425] env[65503]: value = "task-4450621" [ 1068.354425] env[65503]: _type = "Task" [ 1068.354425] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.385036] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0029a5-b35f-4c1e-9993-09f59397f2fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.391375] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450621, 'name': CreateVM_Task} progress is 15%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.396736] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.397222] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.397511] env[65503]: INFO nova.compute.manager [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Attaching volume 09387433-6551-4123-9495-cdd30655e030 to /dev/sdb [ 1068.400719] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f93e5b8-7768-44e5-92c6-34c068f38375 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.419308] env[65503]: DEBUG nova.compute.provider_tree [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1068.443531] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d116c1-0ebf-42fc-929c-e04e5582c0ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.451764] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbeb12b8-7623-4690-a8f6-02e51e904ad9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.467418] env[65503]: DEBUG nova.virt.block_device [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Updating existing volume attachment record: f0a3748d-a18e-4608-a19f-a67aba3858de {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1068.588263] env[65503]: DEBUG nova.objects.base [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=65503) wrapper 
/opt/stack/nova/nova/objects/base.py:136}} [ 1068.589529] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd80837-f159-4c82-9191-83a88d2cda85 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.614480] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c22e34ac-4c22-408d-9508-04777ed88187 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.624965] env[65503]: DEBUG oslo_vmware.api [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1068.624965] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520881ee-f38f-b1ad-6a22-9d0605d02764" [ 1068.624965] env[65503]: _type = "Task" [ 1068.624965] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.632687] env[65503]: DEBUG oslo_vmware.api [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520881ee-f38f-b1ad-6a22-9d0605d02764, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.725982] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "refresh_cache-7802baf8-04ff-4df4-90b0-71cb97dddc83" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.726234] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "refresh_cache-7802baf8-04ff-4df4-90b0-71cb97dddc83" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.726441] env[65503]: DEBUG nova.network.neutron [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1068.868117] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450621, 'name': CreateVM_Task, 'duration_secs': 0.449889} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.868313] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1068.868850] env[65503]: WARNING neutronclient.v2_0.client [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1068.869246] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.869397] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.869724] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1068.870345] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd333b5d-fda3-4f5e-a14a-261463789841 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.876403] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1068.876403] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528df5a8-3f5a-5907-8d48-e7150af3d167" [ 1068.876403] env[65503]: _type = "Task" [ 1068.876403] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.885659] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528df5a8-3f5a-5907-8d48-e7150af3d167, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.926944] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.927265] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.927537] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.927787] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.927987] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.930512] env[65503]: INFO nova.compute.manager [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Terminating instance [ 1068.945888] env[65503]: ERROR nova.scheduler.client.report [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] [req-8711c894-8a03-485e-b8c9-afa4cc794653] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 988ff85a-1d12-41bb-a369-e298e8491ca1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8711c894-8a03-485e-b8c9-afa4cc794653"}]} [ 1068.964840] env[65503]: DEBUG nova.scheduler.client.report [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1068.981137] env[65503]: DEBUG nova.scheduler.client.report [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1068.981450] env[65503]: DEBUG nova.compute.provider_tree [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1068.996060] env[65503]: DEBUG nova.scheduler.client.report [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1069.017360] env[65503]: DEBUG nova.scheduler.client.report [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1069.134932] env[65503]: DEBUG oslo_vmware.api [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520881ee-f38f-b1ad-6a22-9d0605d02764, 'name': SearchDatastore_Task, 'duration_secs': 0.009406} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.134932] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.231361] env[65503]: WARNING openstack [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1069.231777] env[65503]: WARNING openstack [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1069.255321] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c08f4fd-fac6-45ae-9cc1-b6b890ca93e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.264356] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239d722a-a17a-4522-9914-4b90dc36a85e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.303566] env[65503]: DEBUG nova.network.neutron [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1069.306923] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b562cb-288c-4da6-818a-97b8729f91f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.317337] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ba946b-6b1b-4256-b8aa-c34784fc5df6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.335846] env[65503]: DEBUG nova.compute.provider_tree [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1069.338813] env[65503]: WARNING openstack [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1069.339205] env[65503]: WARNING openstack [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1069.394312] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528df5a8-3f5a-5907-8d48-e7150af3d167, 'name': SearchDatastore_Task, 'duration_secs': 0.045381} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.394312] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.394493] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1069.394805] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.395027] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.395272] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1069.395671] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9d07150-cd39-4ed3-b128-490a760b056f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.418267] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1069.418495] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1069.419328] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-440d3cff-f960-4e96-a761-2b22932a15a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.427511] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1069.427511] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5209cd29-effa-e2ab-6364-ca15f73b4f61" [ 1069.427511] env[65503]: _type = "Task" [ 1069.427511] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.440956] env[65503]: DEBUG nova.compute.manager [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1069.441318] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1069.441783] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5209cd29-effa-e2ab-6364-ca15f73b4f61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.443765] env[65503]: WARNING neutronclient.v2_0.client [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1069.444838] env[65503]: WARNING openstack [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1069.445422] env[65503]: WARNING openstack [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1069.459277] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7806792-cb94-4030-af0b-c81e7275388d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.472114] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1069.472566] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09238d52-4c86-4675-8390-3b55b356cdd1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.481440] env[65503]: DEBUG oslo_vmware.api [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1069.481440] env[65503]: value = "task-4450623" [ 1069.481440] env[65503]: _type = "Task" [ 1069.481440] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.491886] env[65503]: DEBUG oslo_vmware.api [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450623, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.563831] env[65503]: DEBUG nova.network.neutron [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Updating instance_info_cache with network_info: [{"id": "099eaaaa-f2b2-44dc-98c7-be6801506e99", "address": "fa:16:3e:2a:14:5d", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099eaaaa-f2", "ovs_interfaceid": "099eaaaa-f2b2-44dc-98c7-be6801506e99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1069.880861] env[65503]: DEBUG nova.scheduler.client.report [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Updated inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with generation 135 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1069.881285] env[65503]: DEBUG nova.compute.provider_tree [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Updating resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 generation from 135 to 136 during operation: update_inventory {{(pid=65503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1069.881536] env[65503]: DEBUG nova.compute.provider_tree [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1069.940506] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5209cd29-effa-e2ab-6364-ca15f73b4f61, 'name': SearchDatastore_Task, 'duration_secs': 0.018713} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.941579] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55a6cdce-5f3d-4658-9a22-022c339760c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.949339] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1069.949339] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5296ca3a-4a12-1613-c1b4-b8dbb82e9994" [ 1069.949339] env[65503]: _type = "Task" [ 1069.949339] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.958763] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5296ca3a-4a12-1613-c1b4-b8dbb82e9994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.966636] env[65503]: DEBUG nova.compute.manager [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Received event network-changed-f8cdf0ee-e818-41cd-b8aa-b485ea737879 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1069.966881] env[65503]: DEBUG nova.compute.manager [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Refreshing instance network info cache due to event network-changed-f8cdf0ee-e818-41cd-b8aa-b485ea737879. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1069.967037] env[65503]: DEBUG oslo_concurrency.lockutils [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Acquiring lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.967182] env[65503]: DEBUG oslo_concurrency.lockutils [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Acquired lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.967381] env[65503]: DEBUG nova.network.neutron [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Refreshing network info cache for port f8cdf0ee-e818-41cd-b8aa-b485ea737879 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1069.992937] env[65503]: DEBUG oslo_vmware.api [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450623, 'name': PowerOffVM_Task, 'duration_secs': 0.24747} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.993234] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1069.993401] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1069.993665] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0a1720b-7cd0-471e-8f52-54615741cf4e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.063456] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1070.063842] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1070.064034] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleting the datastore file [datastore1] fcdcabb9-f076-4fa9-ac30-3220eb6064da {{(pid=65503) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1070.064793] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87f1158f-827f-4d84-ae43-06f9855596ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.067388] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "refresh_cache-7802baf8-04ff-4df4-90b0-71cb97dddc83" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.067845] env[65503]: DEBUG nova.compute.manager [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Instance network_info: |[{"id": "099eaaaa-f2b2-44dc-98c7-be6801506e99", "address": "fa:16:3e:2a:14:5d", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099eaaaa-f2", "ovs_interfaceid": "099eaaaa-f2b2-44dc-98c7-be6801506e99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1070.068439] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:14:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '099eaaaa-f2b2-44dc-98c7-be6801506e99', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1070.078524] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1070.079493] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1070.079841] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54bd82e7-6920-4e4e-99b5-87556167a77d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.100549] env[65503]: DEBUG oslo_vmware.api [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1070.100549] env[65503]: value = "task-4450625" [ 1070.100549] env[65503]: _type = "Task" [ 1070.100549] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.107695] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1070.107695] env[65503]: value = "task-4450626" [ 1070.107695] env[65503]: _type = "Task" [ 1070.107695] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.114910] env[65503]: DEBUG oslo_vmware.api [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.121326] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450626, 'name': CreateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.217203] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528925fa-8c4e-d8da-3e42-d7baec24f834/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1070.218234] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e441379-2e57-401e-a447-cfa271bd7941 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.226334] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528925fa-8c4e-d8da-3e42-d7baec24f834/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1070.226508] env[65503]: ERROR oslo_vmware.rw_handles [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528925fa-8c4e-d8da-3e42-d7baec24f834/disk-0.vmdk due to incomplete transfer. 
[ 1070.226773] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c59387fc-f62f-4857-859e-7ad11a52962e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.237179] env[65503]: DEBUG oslo_vmware.rw_handles [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528925fa-8c4e-d8da-3e42-d7baec24f834/disk-0.vmdk. {{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1070.237582] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Uploaded image 4fad5934-e42b-4e2d-849e-59f48c65fe4b to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1070.240320] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1070.240626] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e0bd157e-8a41-45cc-a969-da6256b90db2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.249706] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1070.249706] env[65503]: value = "task-4450627" [ 1070.249706] env[65503]: _type = "Task" [ 1070.249706] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.259019] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450627, 'name': Destroy_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.387472] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.803s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.389955] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.255s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.419498] env[65503]: INFO nova.scheduler.client.report [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Deleted allocations for instance 96d8f433-9b86-422f-88ef-99836fb21f30 [ 1070.462037] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5296ca3a-4a12-1613-c1b4-b8dbb82e9994, 'name': SearchDatastore_Task, 'duration_secs': 0.010174} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.462459] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.462818] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a/2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1070.463138] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-880ac5fa-2e5c-4134-ae08-a072ef8696e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.469456] env[65503]: WARNING neutronclient.v2_0.client [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1070.470159] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1070.470514] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1070.479532] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1070.479532] env[65503]: value = "task-4450628" [ 1070.479532] env[65503]: _type = "Task" [ 1070.479532] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.489450] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450628, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.595755] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1070.596212] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1070.617301] env[65503]: DEBUG oslo_vmware.api [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241237} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.618229] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1070.618510] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1070.618711] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1070.618905] env[65503]: INFO nova.compute.manager [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1070.619161] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1070.619384] env[65503]: DEBUG nova.compute.manager [-] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1070.619517] env[65503]: DEBUG nova.network.neutron [-] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1070.619850] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1070.620410] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1070.620664] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1070.630842] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450626, 'name': CreateVM_Task, 'duration_secs': 0.394757} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.635173] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1070.635843] env[65503]: WARNING neutronclient.v2_0.client [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1070.636211] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.636361] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.636712] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1070.636988] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbfe02f1-b12f-40da-a26d-2b186dce455e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.642173] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1070.642173] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52724977-ecf0-6c54-93e2-5d3c7df41ebb" [ 1070.642173] env[65503]: _type = "Task" [ 1070.642173] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.653212] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52724977-ecf0-6c54-93e2-5d3c7df41ebb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.665401] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1070.672999] env[65503]: WARNING neutronclient.v2_0.client [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1070.673704] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1070.674106] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1070.762871] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450627, 'name': Destroy_Task, 'duration_secs': 0.372647} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.763166] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Destroyed the VM [ 1070.763674] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1070.763972] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-548a0e4e-6678-4b84-b10b-1e2bb855528a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.768807] env[65503]: DEBUG nova.network.neutron [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updated VIF entry in instance network info cache for port f8cdf0ee-e818-41cd-b8aa-b485ea737879. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1070.769192] env[65503]: DEBUG nova.network.neutron [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance_info_cache with network_info: [{"id": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "address": "fa:16:3e:57:76:3d", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cdf0ee-e8", "ovs_interfaceid": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1070.777066] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1070.777066] env[65503]: value = "task-4450630" [ 1070.777066] env[65503]: _type = "Task" [ 1070.777066] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.788168] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450630, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.853018] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "interface-1d1a96cc-63b3-472c-b94a-1ea00763f770-884998aa-5fa4-410b-87e8-843ac2653ab7" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.853283] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-1d1a96cc-63b3-472c-b94a-1ea00763f770-884998aa-5fa4-410b-87e8-843ac2653ab7" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.853720] env[65503]: DEBUG nova.objects.instance [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'flavor' on Instance uuid 1d1a96cc-63b3-472c-b94a-1ea00763f770 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.927352] env[65503]: DEBUG oslo_concurrency.lockutils [None req-03b57826-25d1-4856-bf17-563f1ac64792 tempest-SecurityGroupsTestJSON-421579552 tempest-SecurityGroupsTestJSON-421579552-project-member] Lock "96d8f433-9b86-422f-88ef-99836fb21f30" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.359s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.992679] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450628, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.124922] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46c46ae-9c00-4329-811a-a53bddc52d2a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.137541] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61a9d62-8a79-47cb-97b3-71d33cd88419 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.174234] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c032c63c-3d62-4ba0-a896-72ac7da266df {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.183441] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.183705] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Processing image 5b2e951c-3e9a-4d3c-b99d-569f5b89e872 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1071.183952] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872/5b2e951c-3e9a-4d3c-b99d-569f5b89e872.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.184110] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872/5b2e951c-3e9a-4d3c-b99d-569f5b89e872.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.184294] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1071.184680] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4c55efd-3dfd-4c7c-841f-9903114c25af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.188174] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03f21da-8995-45e2-84fe-12f549a5f93a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.208766] env[65503]: DEBUG nova.compute.provider_tree [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f 
tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.212044] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1071.212354] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1071.213423] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c34098b-89e4-4b8b-90f8-62a68eea1c14 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.221650] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1071.221650] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52215c28-c185-bf09-065f-67fc91ccbe67" [ 1071.221650] env[65503]: _type = "Task" [ 1071.221650] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.235232] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52215c28-c185-bf09-065f-67fc91ccbe67, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.271695] env[65503]: DEBUG oslo_concurrency.lockutils [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Releasing lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.272054] env[65503]: DEBUG nova.compute.manager [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Received event network-vif-plugged-099eaaaa-f2b2-44dc-98c7-be6801506e99 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1071.272194] env[65503]: DEBUG oslo_concurrency.lockutils [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Acquiring lock "7802baf8-04ff-4df4-90b0-71cb97dddc83-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.272409] env[65503]: DEBUG oslo_concurrency.lockutils [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Lock "7802baf8-04ff-4df4-90b0-71cb97dddc83-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.272568] env[65503]: DEBUG oslo_concurrency.lockutils [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Lock "7802baf8-04ff-4df4-90b0-71cb97dddc83-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.272741] env[65503]: DEBUG nova.compute.manager [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] No waiting events found dispatching network-vif-plugged-099eaaaa-f2b2-44dc-98c7-be6801506e99 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1071.272951] env[65503]: WARNING nova.compute.manager [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Received unexpected event network-vif-plugged-099eaaaa-f2b2-44dc-98c7-be6801506e99 for instance with vm_state building and task_state spawning. [ 1071.273305] env[65503]: DEBUG nova.compute.manager [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Received event network-changed-099eaaaa-f2b2-44dc-98c7-be6801506e99 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1071.273305] env[65503]: DEBUG nova.compute.manager [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Refreshing instance network info cache due to event network-changed-099eaaaa-f2b2-44dc-98c7-be6801506e99. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1071.273457] env[65503]: DEBUG oslo_concurrency.lockutils [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Acquiring lock "refresh_cache-7802baf8-04ff-4df4-90b0-71cb97dddc83" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.273584] env[65503]: DEBUG oslo_concurrency.lockutils [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Acquired lock "refresh_cache-7802baf8-04ff-4df4-90b0-71cb97dddc83" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.273734] env[65503]: DEBUG nova.network.neutron [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Refreshing network info cache for port 099eaaaa-f2b2-44dc-98c7-be6801506e99 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1071.290224] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450630, 'name': RemoveSnapshot_Task, 'duration_secs': 0.380089} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.290224] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1071.290609] env[65503]: DEBUG nova.compute.manager [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1071.291332] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2b8984-6319-4887-964a-e69b5f7b1767 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.359983] env[65503]: WARNING neutronclient.v2_0.client [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1071.361414] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1071.361755] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1071.489406] env[65503]: DEBUG nova.objects.instance [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'pci_requests' on Instance uuid 1d1a96cc-63b3-472c-b94a-1ea00763f770 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.496834] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450628, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.908321} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.496834] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a/2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1071.497090] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1071.497277] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da89b96c-00b3-454c-aa4a-a95d7ea71880 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.505113] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1071.505113] env[65503]: value = "task-4450631" [ 1071.505113] env[65503]: _type = "Task" [ 1071.505113] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.514007] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450631, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.632713] env[65503]: DEBUG nova.network.neutron [-] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1071.715980] env[65503]: DEBUG nova.scheduler.client.report [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1071.732693] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Preparing fetch location {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1071.733016] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Fetch image to [datastore1] OSTACK_IMG_19e2b6e0-18d8-4787-88d3-8823b031184d/OSTACK_IMG_19e2b6e0-18d8-4787-88d3-8823b031184d.vmdk {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1071.733212] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Downloading stream optimized image 5b2e951c-3e9a-4d3c-b99d-569f5b89e872 to [datastore1] OSTACK_IMG_19e2b6e0-18d8-4787-88d3-8823b031184d/OSTACK_IMG_19e2b6e0-18d8-4787-88d3-8823b031184d.vmdk on the data store datastore1 as vApp {{(pid=65503) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1071.733390] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Downloading image file data 5b2e951c-3e9a-4d3c-b99d-569f5b89e872 to the ESX as VM named 'OSTACK_IMG_19e2b6e0-18d8-4787-88d3-8823b031184d' {{(pid=65503) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1071.776571] env[65503]: WARNING neutronclient.v2_0.client [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, 
please use that as this will be removed in a future release. [ 1071.777612] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1071.777974] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1071.806189] env[65503]: INFO nova.compute.manager [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Shelve offloading [ 1071.812057] env[65503]: DEBUG oslo_vmware.rw_handles [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1071.812057] env[65503]: value = "resgroup-9" [ 1071.812057] env[65503]: _type = "ResourcePool" [ 1071.812057] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1071.812267] env[65503]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-e2862853-c8a0-4a00-88d6-0cb7c4e91d17 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.836999] env[65503]: DEBUG oslo_vmware.rw_handles [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lease: (returnval){ [ 1071.836999] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527de60a-2a14-4dd6-b4c8-458b945ca46a" [ 1071.836999] env[65503]: _type = "HttpNfcLease" [ 1071.836999] env[65503]: } obtained for vApp import into resource pool (val){ [ 1071.836999] env[65503]: value = "resgroup-9" [ 1071.836999] env[65503]: _type = "ResourcePool" [ 1071.836999] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1071.837500] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the lease: (returnval){ [ 1071.837500] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527de60a-2a14-4dd6-b4c8-458b945ca46a" [ 1071.837500] env[65503]: _type = "HttpNfcLease" [ 1071.837500] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1071.844425] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1071.844425] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527de60a-2a14-4dd6-b4c8-458b945ca46a" [ 1071.844425] env[65503]: _type = "HttpNfcLease" [ 1071.844425] env[65503]: } is initializing. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1071.892711] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1071.893150] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1071.958308] env[65503]: WARNING neutronclient.v2_0.client [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1071.959109] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1071.959528] env[65503]: WARNING openstack [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1071.992835] env[65503]: DEBUG nova.objects.base [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Object Instance<1d1a96cc-63b3-472c-b94a-1ea00763f770> lazy-loaded attributes: flavor,pci_requests {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1071.993140] env[65503]: DEBUG nova.network.neutron [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1071.993512] env[65503]: WARNING neutronclient.v2_0.client [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1071.993900] env[65503]: WARNING neutronclient.v2_0.client [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1071.994702] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1071.995124] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1072.018431] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450631, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085866} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.018870] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1072.019965] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5d5ef7-c31c-42ba-b418-b40664bafa46 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.045602] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a/2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1072.050117] env[65503]: DEBUG nova.network.neutron [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Updated VIF entry in instance network info cache for port 099eaaaa-f2b2-44dc-98c7-be6801506e99. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1072.050492] env[65503]: DEBUG nova.network.neutron [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Updating instance_info_cache with network_info: [{"id": "099eaaaa-f2b2-44dc-98c7-be6801506e99", "address": "fa:16:3e:2a:14:5d", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099eaaaa-f2", "ovs_interfaceid": "099eaaaa-f2b2-44dc-98c7-be6801506e99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1072.053053] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b349d1b-c460-48ad-95eb-fbce7058474f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.075332] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1072.075332] env[65503]: value = "task-4450633" [ 1072.075332] env[65503]: _type = "Task" [ 1072.075332] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.085216] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450633, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.108752] env[65503]: DEBUG nova.policy [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b617c5c7508403f9bef0d6b436f541d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be67f50c5bc447309d4c04f3f2805455', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1072.135351] env[65503]: INFO nova.compute.manager [-] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Took 1.52 seconds to deallocate network for instance. [ 1072.310228] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1072.310795] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e634ed6e-304d-4c06-a927-14db744e50e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.320954] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1072.320954] env[65503]: value = "task-4450634" [ 1072.320954] env[65503]: _type = "Task" [ 1072.320954] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.333540] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1072.333783] env[65503]: DEBUG nova.compute.manager [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1072.334696] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b6889f-3c2b-4d6e-aa25-af81a6964c0a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.346170] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1072.346170] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527de60a-2a14-4dd6-b4c8-458b945ca46a" [ 1072.346170] env[65503]: _type = "HttpNfcLease" [ 1072.346170] env[65503]: } is initializing. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1072.348313] env[65503]: DEBUG oslo_concurrency.lockutils [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.348595] env[65503]: DEBUG oslo_concurrency.lockutils [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1072.348777] env[65503]: DEBUG nova.network.neutron [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1072.553545] env[65503]: DEBUG oslo_concurrency.lockutils [req-46c5acfb-b3fe-4d2f-899e-e4a070233cce req-8e6655ec-1f19-4ec8-89be-b7461c1ab54e service nova] Releasing lock "refresh_cache-7802baf8-04ff-4df4-90b0-71cb97dddc83" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.586894] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450633, 'name': ReconfigVM_Task, 'duration_secs': 0.290037} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.587427] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a/2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1072.588043] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2319a79c-8537-49d6-917d-bd50d67edc83 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.595537] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1072.595537] env[65503]: value = "task-4450635" [ 1072.595537] env[65503]: _type = "Task" [ 1072.595537] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.604739] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450635, 'name': Rename_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.643221] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.728900] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.339s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.731881] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.089s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.732121] env[65503]: DEBUG nova.objects.instance [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lazy-loading 'resources' on Instance uuid fcdcabb9-f076-4fa9-ac30-3220eb6064da {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1072.846628] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1072.846628] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527de60a-2a14-4dd6-b4c8-458b945ca46a" [ 1072.846628] env[65503]: _type = "HttpNfcLease" [ 1072.846628] env[65503]: } is initializing. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1072.851606] env[65503]: WARNING neutronclient.v2_0.client [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
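The "compute_resources" lock entries above (Acquiring lock ... / acquired ... waited ... / "released" ... held ...) are emitted by oslo.concurrency's lockutils wrapper. A minimal, standalone sketch of that locking pattern follows; the decorated function and its body are illustrative placeholders, not Nova's ResourceTracker code, and only the lock name is taken from the log.

    # Sketch of the oslo.concurrency locking pattern behind the
    # "Acquiring lock ... / acquired ... / released" entries above.
    # The decorated function is a placeholder, not Nova's code.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs only while the named in-process lock is held; the wrapper
        # logs how long each caller waited for and held the lock.
        pass

    # Equivalent context-manager form:
    with lockutils.lock('compute_resources'):
        pass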
[ 1072.852249] env[65503]: WARNING openstack [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1072.852605] env[65503]: WARNING openstack [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1072.977991] env[65503]: WARNING openstack [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1072.978435] env[65503]: WARNING openstack [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1073.016340] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1073.016685] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870464', 'volume_id': '09387433-6551-4123-9495-cdd30655e030', 'name': 'volume-09387433-6551-4123-9495-cdd30655e030', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f89ca00e-d54e-4040-bf18-9a5ec96378d5', 'attached_at': '', 'detached_at': '', 'volume_id': '09387433-6551-4123-9495-cdd30655e030', 'serial': '09387433-6551-4123-9495-cdd30655e030'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1073.017970] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10f056b-4c1e-41a9-b8c9-9756605e879f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.038072] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc769c49-0f86-4a4f-96ea-9c2243240db4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.041832] env[65503]: WARNING neutronclient.v2_0.client [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
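The _attach_volume_vmdk entry above logs the Cinder connection_info handed to the driver. Below is a trimmed reconstruction of that dict, with values copied from the log entry and keys reduced to the ones that reappear in the ReconfigVM_Task entry that follows; the derivation of the datastore path is only an observation about how the logged values line up, not a claim about Nova's exact code.

    # Trimmed reconstruction of the connection_info logged by
    # _attach_volume_vmdk above (values copied from the log entry).
    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-870464',  # vCenter reference for the volume backing
            'volume_id': '09387433-6551-4123-9495-cdd30655e030',
            'name': 'volume-09387433-6551-4123-9495-cdd30655e030',
            'access_mode': 'rw',
            'encrypted': False,
        },
    }

    # The datastore path attached by the ReconfigVM_Task logged below
    # matches data['name']:
    data = connection_info['data']
    vmdk_path = '[datastore1] %s/%s.vmdk' % (data['name'], data['name'])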
[ 1073.042493] env[65503]: WARNING openstack [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1073.042897] env[65503]: WARNING openstack [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1073.077341] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] volume-09387433-6551-4123-9495-cdd30655e030/volume-09387433-6551-4123-9495-cdd30655e030.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1073.077625] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b92e5b8e-c535-4357-b45e-e63b652c6c38 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.103238] env[65503]: DEBUG oslo_vmware.api [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1073.103238] env[65503]: value = "task-4450636" [ 1073.103238] env[65503]: _type = "Task" [ 1073.103238] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.106941] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450635, 'name': Rename_Task, 'duration_secs': 0.156545} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.110251] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1073.110554] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4c71eed-7663-4e0b-8147-d7bedbdc63ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.118657] env[65503]: DEBUG oslo_vmware.api [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450636, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.120225] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1073.120225] env[65503]: value = "task-4450637" [ 1073.120225] env[65503]: _type = "Task" [ 1073.120225] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.130427] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450637, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.137277] env[65503]: DEBUG nova.network.neutron [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updating instance_info_cache with network_info: [{"id": "96370c0c-da2e-4229-82a1-a24b799e6402", "address": "fa:16:3e:9a:2f:df", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96370c0c-da", "ovs_interfaceid": "96370c0c-da2e-4229-82a1-a24b799e6402", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1073.298763] env[65503]: INFO nova.scheduler.client.report [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted allocation for migration 565057d4-ce23-4f92-ad47-1760ef87ee11 [ 1073.352542] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1073.352542] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527de60a-2a14-4dd6-b4c8-458b945ca46a" [ 1073.352542] env[65503]: _type = "HttpNfcLease" [ 1073.352542] env[65503]: } is ready. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1073.353092] env[65503]: DEBUG oslo_vmware.rw_handles [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1073.353092] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527de60a-2a14-4dd6-b4c8-458b945ca46a" [ 1073.353092] env[65503]: _type = "HttpNfcLease" [ 1073.353092] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1073.353716] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f3757e-95e1-4d3f-a0c3-4c7c8a681901 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.368047] env[65503]: DEBUG oslo_vmware.rw_handles [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfa30f-c812-64c8-2fce-3983215f139b/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1073.368381] env[65503]: DEBUG oslo_vmware.rw_handles [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfa30f-c812-64c8-2fce-3983215f139b/disk-0.vmdk. {{(pid=65503) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1073.440027] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-241ca062-f660-412b-9ffa-21c913a9fec5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.497926] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c62408-4bbb-4255-b028-ee39570b09a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.508585] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6ce593-df47-4748-a7d3-abe3383fb5c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.560630] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13639361-8876-4859-ab04-ceee6d151be6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.572212] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6524b1e9-4beb-4999-9a24-6c4bbd0e082a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.593787] env[65503]: DEBUG nova.compute.provider_tree [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1073.618686] env[65503]: DEBUG oslo_vmware.api [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450636, 'name': ReconfigVM_Task, 'duration_secs': 0.47099} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.619092] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Reconfigured VM instance instance-0000005b to attach disk [datastore1] volume-09387433-6551-4123-9495-cdd30655e030/volume-09387433-6551-4123-9495-cdd30655e030.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1073.624090] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6088ab26-0608-42e2-bbfa-b4cf2537e92e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.640418] env[65503]: DEBUG oslo_concurrency.lockutils [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.640746] env[65503]: WARNING neutronclient.v2_0.client [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1073.641335] env[65503]: WARNING openstack [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1073.641774] env[65503]: WARNING openstack [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1073.648414] env[65503]: WARNING neutronclient.v2_0.client [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1073.661495] env[65503]: DEBUG oslo_vmware.api [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1073.661495] env[65503]: value = "task-4450638" [ 1073.661495] env[65503]: _type = "Task" [ 1073.661495] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.661814] env[65503]: DEBUG oslo_vmware.api [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450637, 'name': PowerOnVM_Task, 'duration_secs': 0.524905} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.664620] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1073.664859] env[65503]: INFO nova.compute.manager [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Took 7.85 seconds to spawn the instance on the hypervisor. [ 1073.665187] env[65503]: DEBUG nova.compute.manager [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1073.669638] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169bd528-bc64-44d6-a772-f34649ff88e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.682849] env[65503]: DEBUG oslo_vmware.api [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450638, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.701315] env[65503]: DEBUG nova.network.neutron [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Successfully updated port: 884998aa-5fa4-410b-87e8-843ac2653ab7 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1073.805375] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "bee97942-afb2-465f-9774-56f5aa8becca" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.436s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.038600] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1074.039702] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d70d6b-3953-4349-8096-6e84507785cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.049192] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1074.049526] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98d7c9fd-bafe-4d7a-8460-33a5b7db0e15 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.098599] env[65503]: DEBUG nova.scheduler.client.report [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1074.122875] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1074.123158] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: 
b00a98a4-4865-4a02-a353-3d1da9ef0e51] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1074.123430] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleting the datastore file [datastore2] b00a98a4-4865-4a02-a353-3d1da9ef0e51 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.123813] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-224f9473-0fee-46b0-b563-686606b9429d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.137420] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1074.137420] env[65503]: value = "task-4450640" [ 1074.137420] env[65503]: _type = "Task" [ 1074.137420] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.150600] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450640, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.177180] env[65503]: DEBUG oslo_vmware.api [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450638, 'name': ReconfigVM_Task, 'duration_secs': 0.184695} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.178919] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870464', 'volume_id': '09387433-6551-4123-9495-cdd30655e030', 'name': 'volume-09387433-6551-4123-9495-cdd30655e030', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f89ca00e-d54e-4040-bf18-9a5ec96378d5', 'attached_at': '', 'detached_at': '', 'volume_id': '09387433-6551-4123-9495-cdd30655e030', 'serial': '09387433-6551-4123-9495-cdd30655e030'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1074.209320] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.209453] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.209572] env[65503]: DEBUG nova.network.neutron [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1074.216925] env[65503]: INFO nova.compute.manager [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Took 15.47 seconds to build instance. [ 1074.221329] env[65503]: DEBUG oslo_vmware.rw_handles [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Completed reading data from the image iterator. {{(pid=65503) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1074.221526] env[65503]: DEBUG oslo_vmware.rw_handles [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfa30f-c812-64c8-2fce-3983215f139b/disk-0.vmdk. 
{{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1074.222929] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d07d443-7604-4398-9c8f-4df04f867d84 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.232891] env[65503]: DEBUG oslo_vmware.rw_handles [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfa30f-c812-64c8-2fce-3983215f139b/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1074.233141] env[65503]: DEBUG oslo_vmware.rw_handles [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfa30f-c812-64c8-2fce-3983215f139b/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1074.233391] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-90648a73-e308-4b9c-a766-90f696e8f0db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.437426] env[65503]: DEBUG oslo_vmware.rw_handles [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfa30f-c812-64c8-2fce-3983215f139b/disk-0.vmdk. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1074.437882] env[65503]: INFO nova.virt.vmwareapi.images [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Downloaded image file data 5b2e951c-3e9a-4d3c-b99d-569f5b89e872 [ 1074.438631] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0ac2f4-92fc-4c65-8879-3fb7466fefe4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.457182] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b9dd697-4d06-42a3-8ea8-cbcb0efbc30a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.483089] env[65503]: INFO nova.virt.vmwareapi.images [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] The imported VM was unregistered [ 1074.486104] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Caching image {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1074.486352] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating directory with path [datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872 {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1074.486691] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b72974d-53f0-469e-88d6-ecafc6d5c483 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.510972] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Created directory with path [datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872 {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1074.511289] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_19e2b6e0-18d8-4787-88d3-8823b031184d/OSTACK_IMG_19e2b6e0-18d8-4787-88d3-8823b031184d.vmdk to [datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872/5b2e951c-3e9a-4d3c-b99d-569f5b89e872.vmdk. 
{{(pid=65503) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1074.511595] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-23cdf2ae-5df5-400d-825b-660d470d2f19 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.519331] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1074.519331] env[65503]: value = "task-4450642" [ 1074.519331] env[65503]: _type = "Task" [ 1074.519331] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.529475] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450642, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.607140] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.875s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.645126] env[65503]: INFO nova.scheduler.client.report [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleted allocations for instance fcdcabb9-f076-4fa9-ac30-3220eb6064da [ 1074.652941] env[65503]: DEBUG oslo_vmware.api [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450640, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245034} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.653445] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1074.653659] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1074.653830] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1074.672500] env[65503]: INFO nova.scheduler.client.report [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleted allocations for instance b00a98a4-4865-4a02-a353-3d1da9ef0e51 [ 1074.715145] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1074.715649] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1074.725159] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39e1376b-5709-4200-936d-8ee9c99093fe tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.992s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.761449] env[65503]: WARNING nova.network.neutron [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] d2c28c8e-55de-416b-97e1-c5ea06e7f107 already exists in list: networks containing: ['d2c28c8e-55de-416b-97e1-c5ea06e7f107']. 
ignoring it [ 1074.799894] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1074.800303] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1074.865359] env[65503]: WARNING neutronclient.v2_0.client [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1074.866381] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1074.867054] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1074.886823] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "bee97942-afb2-465f-9774-56f5aa8becca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.887112] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "bee97942-afb2-465f-9774-56f5aa8becca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.887344] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "bee97942-afb2-465f-9774-56f5aa8becca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.887577] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "bee97942-afb2-465f-9774-56f5aa8becca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.887784] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "bee97942-afb2-465f-9774-56f5aa8becca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.889887] env[65503]: INFO nova.compute.manager [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Terminating instance [ 1074.893628] env[65503]: DEBUG nova.compute.manager [req-8b97298e-0b76-48c2-93e8-c7972f231a0b req-a4fd7ab7-ce85-4d6f-911d-fa34b7d5b209 service nova] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Received event network-vif-deleted-c1fca361-555c-407f-bd51-6ea779e02f3a {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1074.911733] env[65503]: DEBUG nova.compute.manager [req-a761f622-0d79-4fba-a663-f3dca7d44e72 req-7076ad50-cbd0-41b2-870c-f7c856ff6c97 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Received event network-vif-plugged-884998aa-5fa4-410b-87e8-843ac2653ab7 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1074.911905] env[65503]: DEBUG oslo_concurrency.lockutils [req-a761f622-0d79-4fba-a663-f3dca7d44e72 req-7076ad50-cbd0-41b2-870c-f7c856ff6c97 service nova] Acquiring lock "1d1a96cc-63b3-472c-b94a-1ea00763f770-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.912132] env[65503]: DEBUG oslo_concurrency.lockutils [req-a761f622-0d79-4fba-a663-f3dca7d44e72 req-7076ad50-cbd0-41b2-870c-f7c856ff6c97 service nova] Lock "1d1a96cc-63b3-472c-b94a-1ea00763f770-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.912282] env[65503]: DEBUG oslo_concurrency.lockutils [req-a761f622-0d79-4fba-a663-f3dca7d44e72 req-7076ad50-cbd0-41b2-870c-f7c856ff6c97 service nova] Lock "1d1a96cc-63b3-472c-b94a-1ea00763f770-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.912442] env[65503]: DEBUG nova.compute.manager [req-a761f622-0d79-4fba-a663-f3dca7d44e72 req-7076ad50-cbd0-41b2-870c-f7c856ff6c97 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] No waiting events found dispatching network-vif-plugged-884998aa-5fa4-410b-87e8-843ac2653ab7 {{(pid=65503) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:345}} [ 1074.912600] env[65503]: WARNING nova.compute.manager [req-a761f622-0d79-4fba-a663-f3dca7d44e72 req-7076ad50-cbd0-41b2-870c-f7c856ff6c97 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Received unexpected event network-vif-plugged-884998aa-5fa4-410b-87e8-843ac2653ab7 for instance with vm_state active and task_state None. [ 1075.002471] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1075.002471] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1075.030716] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450642, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.079720] env[65503]: WARNING neutronclient.v2_0.client [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
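The update_instance_cache_with_nw_info entries in this section log the full network_info list for each instance. The skeleton below reproduces a single VIF entry with values copied from one of those blobs, trimmed to show where the port id, MAC, fixed IP and floating IP sit in the structure; it is a reading aid only, not the complete model.

    # Skeleton of one VIF entry from the network_info lists logged in this
    # section (values copied from the log; keys trimmed for readability).
    vif = {
        'id': '96370c0c-da2e-4229-82a1-a24b799e6402',   # Neutron port UUID
        'address': 'fa:16:3e:9a:2f:df',                 # port MAC
        'type': 'ovs',
        'devname': 'tap96370c0c-da',                    # tap device named from the port id
        'details': {'segmentation_id': 582,
                    'nsx-logical-switch-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95'},
        'network': {
            'id': '997076b2-693b-4d42-9279-626006ea5699',
            'bridge': 'br-int',
            'subnets': [{
                'cidr': '192.168.128.0/28',
                'gateway': {'address': '192.168.128.1'},
                'ips': [{'address': '192.168.128.13',
                         'floating_ips': [{'address': '10.180.180.190'}]}],
            }],
        },
    }

    # e.g. the fixed and floating addresses of the first IP on the first subnet:
    fixed_ip = vif['network']['subnets'][0]['ips'][0]['address']
    floating = [f['address']
                for f in vif['network']['subnets'][0]['ips'][0]['floating_ips']]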
[ 1075.080559] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1075.080875] env[65503]: WARNING openstack [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1075.158186] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6e242cb4-6d84-4a31-8dce-ba3aaac8c018 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "fcdcabb9-f076-4fa9-ac30-3220eb6064da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.231s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.177781] env[65503]: DEBUG oslo_concurrency.lockutils [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.178198] env[65503]: DEBUG oslo_concurrency.lockutils [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.178438] env[65503]: DEBUG nova.objects.instance [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lazy-loading 'resources' on Instance uuid b00a98a4-4865-4a02-a353-3d1da9ef0e51 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.182484] env[65503]: DEBUG nova.network.neutron [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updating instance_info_cache with network_info: [{"id": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "address": "fa:16:3e:2b:7c:50", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a654cb-c4", "ovs_interfaceid": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "884998aa-5fa4-410b-87e8-843ac2653ab7", "address": "fa:16:3e:e7:92:31", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap884998aa-5f", "ovs_interfaceid": "884998aa-5fa4-410b-87e8-843ac2653ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1075.233072] env[65503]: DEBUG nova.objects.instance [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'flavor' on Instance uuid f89ca00e-d54e-4040-bf18-9a5ec96378d5 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.396330] env[65503]: DEBUG nova.compute.manager [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1075.396658] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1075.397511] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b04358-1427-4e64-9599-54f36a4d7818 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.409402] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.409667] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8ae9a4f-627a-4062-98a8-f395364530fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.418592] env[65503]: DEBUG oslo_vmware.api [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1075.418592] env[65503]: value = "task-4450643" [ 1075.418592] env[65503]: _type = "Task" [ 1075.418592] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.428202] env[65503]: DEBUG oslo_vmware.api [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.531285] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450642, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.685761] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.686548] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.686702] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.687312] env[65503]: DEBUG nova.objects.instance [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lazy-loading 'numa_topology' on Instance uuid b00a98a4-4865-4a02-a353-3d1da9ef0e51 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.689731] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88a4860-bb3c-4fc3-964f-f1970ab14743 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.709414] env[65503]: DEBUG nova.virt.hardware [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1075.709774] env[65503]: DEBUG nova.virt.hardware [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1075.709873] env[65503]: DEBUG nova.virt.hardware [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1075.710018] env[65503]: DEBUG nova.virt.hardware [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 
tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1075.710294] env[65503]: DEBUG nova.virt.hardware [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1075.710475] env[65503]: DEBUG nova.virt.hardware [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1075.710688] env[65503]: DEBUG nova.virt.hardware [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1075.710845] env[65503]: DEBUG nova.virt.hardware [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1075.711015] env[65503]: DEBUG nova.virt.hardware [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1075.711196] env[65503]: DEBUG nova.virt.hardware [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1075.711371] env[65503]: DEBUG nova.virt.hardware [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1075.722908] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Reconfiguring VM to attach interface {{(pid=65503) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1075.724742] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c99b74a-ec0c-4986-9f66-04a8d6104f03 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.743385] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b9ef1f69-1667-4185-a626-1068cd399b8f tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" "released" by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.346s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.751873] env[65503]: DEBUG oslo_vmware.api [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1075.751873] env[65503]: value = "task-4450644" [ 1075.751873] env[65503]: _type = "Task" [ 1075.751873] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.762486] env[65503]: DEBUG oslo_vmware.api [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450644, 'name': ReconfigVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.930644] env[65503]: DEBUG oslo_vmware.api [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.033701] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450642, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.194531] env[65503]: DEBUG nova.objects.base [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1076.268402] env[65503]: DEBUG oslo_vmware.api [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450644, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.430763] env[65503]: DEBUG oslo_vmware.api [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450643, 'name': PowerOffVM_Task, 'duration_secs': 0.661563} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.431128] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1076.431593] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1076.431734] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc66d08e-a9a5-4c9f-ad74-7d8df7480372 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.437620] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177fb182-89b0-481a-beb4-1bc14024a96d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.450322] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2135809-0f16-4590-ae25-b2f613263bef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.491396] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d393ed0a-0bad-4d5e-9c12-66fac25d5c2b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.504045] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6e9ce9-8889-48d2-8137-b28668bdf72a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.524649] env[65503]: DEBUG nova.compute.provider_tree [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.537301] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1076.537660] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1076.539601] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleting the datastore file [datastore2] 
bee97942-afb2-465f-9774-56f5aa8becca {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.541615] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43d46740-39f7-4a18-8b9e-a968ac8281d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.543927] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450642, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.552905] env[65503]: DEBUG oslo_vmware.api [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for the task: (returnval){ [ 1076.552905] env[65503]: value = "task-4450646" [ 1076.552905] env[65503]: _type = "Task" [ 1076.552905] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.566799] env[65503]: DEBUG oslo_vmware.api [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450646, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.776239] env[65503]: DEBUG oslo_vmware.api [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450644, 'name': ReconfigVM_Task, 'duration_secs': 0.70058} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.776239] env[65503]: WARNING neutronclient.v2_0.client [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
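The 'Waiting for the task: (returnval){ ... }' blocks and the repeated '_poll_task ... progress is N%' lines come from oslo.vmware, which polls a vCenter task object until it reaches a terminal state. A hedged sketch of that call pattern; the connection details, vm_ref and config_spec below are placeholders, not values from this log:

from oslo_vmware import api as vmware_api

# Placeholder session; real code supplies the vCenter host and credentials.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

vm_ref = ...       # VirtualMachine managed object reference, looked up elsewhere
config_spec = ...  # VirtualMachineConfigSpec built via the SOAP client factory

# invoke_api() issues ReconfigVM_Task and returns a task reference;
# wait_for_task() then polls it, logging progress until success or error.
task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=config_spec)
task_info = session.wait_for_task(task)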
[ 1076.776456] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.776690] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Reconfigured VM to attach interface {{(pid=65503) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1077.030532] env[65503]: DEBUG nova.scheduler.client.report [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1077.038095] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450642, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.066209] env[65503]: DEBUG oslo_vmware.api [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450646, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.283441] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0902f047-8966-45bf-a81a-da911fd7c876 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-1d1a96cc-63b3-472c-b94a-1ea00763f770-884998aa-5fa4-410b-87e8-843ac2653ab7" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.430s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.342777] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.387700] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "eb18fc5c-168b-4442-af66-15e255ecc535" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.387957] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "eb18fc5c-168b-4442-af66-15e255ecc535" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.459739] env[65503]: DEBUG nova.compute.manager [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Received event network-changed-884998aa-5fa4-410b-87e8-843ac2653ab7 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1077.459923] env[65503]: DEBUG nova.compute.manager [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing instance network info cache due to event network-changed-884998aa-5fa4-410b-87e8-843ac2653ab7. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1077.460193] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Acquiring lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.460424] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Acquired lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.460668] env[65503]: DEBUG nova.network.neutron [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing network info cache for port 884998aa-5fa4-410b-87e8-843ac2653ab7 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1077.536206] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450642, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.997217} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.536609] env[65503]: INFO nova.virt.vmwareapi.ds_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_19e2b6e0-18d8-4787-88d3-8823b031184d/OSTACK_IMG_19e2b6e0-18d8-4787-88d3-8823b031184d.vmdk to [datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872/5b2e951c-3e9a-4d3c-b99d-569f5b89e872.vmdk. 
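The MoveVirtualDisk_Task result above uses the '[datastore] folder/file.vmdk' path convention. A sketch of building and parsing such a path, assuming oslo.vmware's DatastorePath helper; the image id is copied from the log line above, the rest is illustrative:

from oslo_vmware.objects import datastore as ds_obj

image_id = '5b2e951c-3e9a-4d3c-b99d-569f5b89e872'
cache_path = ds_obj.DatastorePath(
    'datastore1', 'devstack-image-cache_base', image_id, image_id + '.vmdk')

# str() renders the bracketed form used throughout this log.
print(str(cache_path))
# Parsing goes the other way and exposes the datastore name and relative path.
parsed = ds_obj.DatastorePath.parse(str(cache_path))
print(parsed.datastore, parsed.rel_path)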
[ 1077.537409] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Cleaning up location [datastore1] OSTACK_IMG_19e2b6e0-18d8-4787-88d3-8823b031184d {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1077.537409] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_19e2b6e0-18d8-4787-88d3-8823b031184d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1077.537409] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cb85fa9-b4fc-4198-a00d-290469291df0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.540309] env[65503]: DEBUG oslo_concurrency.lockutils [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.362s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.549403] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1077.549403] env[65503]: value = "task-4450647" [ 1077.549403] env[65503]: _type = "Task" [ 1077.549403] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.560605] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450647, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.566862] env[65503]: DEBUG oslo_vmware.api [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Task: {'id': task-4450646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.903227} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.567177] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.567398] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1077.567572] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1077.567743] env[65503]: INFO nova.compute.manager [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Took 2.17 seconds to destroy the instance on the hypervisor. [ 1077.568075] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1077.568292] env[65503]: DEBUG nova.compute.manager [-] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1077.568387] env[65503]: DEBUG nova.network.neutron [-] [instance: bee97942-afb2-465f-9774-56f5aa8becca] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1077.568640] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1077.569246] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1077.569515] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1077.745926] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
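The recurring "Disabling service ... no such option valid_interfaces in group [cinder]/[barbican]" warnings mean the SDK asked oslo.config for an option that was never registered under those groups. A minimal oslo.config illustration of the kind of registration such a lookup expects; the group name mirrors the warning, the default value is an assumption:

from oslo_config import cfg

CONF = cfg.CONF
CONF.register_group(cfg.OptGroup('cinder'))
CONF.register_opts(
    [cfg.ListOpt('valid_interfaces', default=['internal', 'public'])],
    group='cinder')

# Once registered, this lookup succeeds instead of raising NoSuchOptError.
print(CONF.cinder.valid_interfaces)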
[ 1077.854493] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.855393] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.894096] env[65503]: DEBUG nova.compute.manager [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1077.963550] env[65503]: WARNING neutronclient.v2_0.client [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1077.965250] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1077.965250] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1078.050049] env[65503]: DEBUG oslo_concurrency.lockutils [None req-aea0f082-2eaa-4c70-bd32-3a281b68b335 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.339s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.051869] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.709s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.052597] env[65503]: INFO nova.compute.manager [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: 
b00a98a4-4865-4a02-a353-3d1da9ef0e51] Unshelving [ 1078.067982] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450647, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072621} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.070534] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1078.070774] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872/5b2e951c-3e9a-4d3c-b99d-569f5b89e872.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.071112] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872/5b2e951c-3e9a-4d3c-b99d-569f5b89e872.vmdk to [datastore1] 7802baf8-04ff-4df4-90b0-71cb97dddc83/7802baf8-04ff-4df4-90b0-71cb97dddc83.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1078.077497] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf39f27a-0335-4fa0-886d-7c5838b08ac8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.088093] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1078.088093] env[65503]: value = "task-4450648" [ 1078.088093] env[65503]: _type = "Task" [ 1078.088093] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.099729] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450648, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.101236] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1078.102324] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1078.166596] env[65503]: WARNING neutronclient.v2_0.client [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1078.167425] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1078.167772] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1078.254385] env[65503]: DEBUG nova.network.neutron [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updated VIF entry in instance network info cache for port 884998aa-5fa4-410b-87e8-843ac2653ab7. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1078.254919] env[65503]: DEBUG nova.network.neutron [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updating instance_info_cache with network_info: [{"id": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "address": "fa:16:3e:2b:7c:50", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a654cb-c4", "ovs_interfaceid": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "884998aa-5fa4-410b-87e8-843ac2653ab7", "address": "fa:16:3e:e7:92:31", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap884998aa-5f", "ovs_interfaceid": "884998aa-5fa4-410b-87e8-843ac2653ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1078.360101] env[65503]: DEBUG nova.compute.utils [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1078.426911] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.427301] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.428874] env[65503]: INFO nova.compute.claims [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1078.431465] env[65503]: DEBUG nova.network.neutron [-] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1078.599527] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450648, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.758125] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Releasing lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.758336] env[65503]: DEBUG nova.compute.manager [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Received event network-vif-unplugged-96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1078.758548] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Acquiring lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.758767] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.758931] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.759111] env[65503]: DEBUG nova.compute.manager [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: 
b00a98a4-4865-4a02-a353-3d1da9ef0e51] No waiting events found dispatching network-vif-unplugged-96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1078.759377] env[65503]: WARNING nova.compute.manager [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Received unexpected event network-vif-unplugged-96370c0c-da2e-4229-82a1-a24b799e6402 for instance with vm_state shelved_offloaded and task_state unshelving. [ 1078.759565] env[65503]: DEBUG nova.compute.manager [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Received event network-changed-96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1078.759779] env[65503]: DEBUG nova.compute.manager [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Refreshing instance network info cache due to event network-changed-96370c0c-da2e-4229-82a1-a24b799e6402. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1078.759955] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Acquiring lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.760296] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Acquired lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.760568] env[65503]: DEBUG nova.network.neutron [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Refreshing network info cache for port 96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1078.863712] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.939397] env[65503]: INFO nova.compute.manager [-] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Took 1.37 seconds to deallocate network for instance. 
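The instance_info_cache dumps in this section serialize each VIF as a nested dict (network, subnets, ips, floating_ips). A small sketch of walking that structure with plain dict access; the sample below is trimmed from the first VIF in the cache dump above:

def list_addresses(network_info):
    # Yields (port id, address, kind) for every fixed and floating IP.
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                yield vif['id'], ip['address'], 'fixed'
                for fip in ip.get('floating_ips', []):
                    yield vif['id'], fip['address'], 'floating'

sample = [{'id': 'b5a654cb-c44f-45fc-bf0c-429ff06916f0',
           'network': {'subnets': [{'ips': [
               {'address': '192.168.128.13',
                'floating_ips': [{'address': '10.180.180.151'}]}]}]}}]

for port_id, addr, kind in list_addresses(sample):
    print(port_id, addr, kind)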
[ 1079.070826] env[65503]: DEBUG nova.compute.utils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1079.081171] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "bc700871-233c-4aa0-867e-4f166b6f54d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.081518] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.099780] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450648, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.264122] env[65503]: WARNING neutronclient.v2_0.client [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1079.264949] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1079.265835] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1079.381925] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "interface-1d1a96cc-63b3-472c-b94a-1ea00763f770-884998aa-5fa4-410b-87e8-843ac2653ab7" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.382254] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-1d1a96cc-63b3-472c-b94a-1ea00763f770-884998aa-5fa4-410b-87e8-843ac2653ab7" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.438432] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1079.438828] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1079.450802] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.497297] env[65503]: DEBUG nova.compute.manager [req-03a304e1-e259-40a6-9359-1e20177b7439 req-f004aa2b-238a-48bf-9fa1-0945e186a2b8 service nova] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Received event network-vif-deleted-7309ef3a-1ef4-4d05-a35b-8aecb1167266 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1079.526488] env[65503]: WARNING neutronclient.v2_0.client [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 
req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1079.527206] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1079.527606] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1079.574575] env[65503]: INFO nova.virt.block_device [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Booting with volume 1dc74fb4-8c72-4626-9e0a-9dad8090a4ba at /dev/sdb [ 1079.586566] env[65503]: DEBUG nova.compute.manager [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1079.603805] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450648, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.629046] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-adf63360-a39f-4b06-89a2-8da6225f194b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.645868] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e7c638-b75f-4ac4-87e6-398a4b352169 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.686737] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4bee627a-9529-45a4-bbd1-5234a92ea27c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.691096] env[65503]: DEBUG nova.network.neutron [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updated VIF entry in instance network info cache for port 96370c0c-da2e-4229-82a1-a24b799e6402. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1079.691555] env[65503]: DEBUG nova.network.neutron [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updating instance_info_cache with network_info: [{"id": "96370c0c-da2e-4229-82a1-a24b799e6402", "address": "fa:16:3e:9a:2f:df", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap96370c0c-da", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1079.703521] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4870feb-7e44-456b-abd6-5490c3d1446d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.752068] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3a4838-b0ec-4b10-a4e8-3e591f6d27fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.760628] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d45588-087d-4195-96cc-d1ead86a6de2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.765621] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a63d71d-361e-45de-82f2-a5f40c917c72 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.776652] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcdcc0f9-2771-4c81-affe-21410ea7b499 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.784258] env[65503]: DEBUG nova.virt.block_device [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updating existing volume attachment record: 29168f2a-d51e-4c9c-8abb-c97edd9e5ceb {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1079.821015] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ffd17c-6e57-4787-a625-5cf97b4c4c2d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.833733] env[65503]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6339aad6-a91a-41b9-bc77-cc1b59f062fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.852505] env[65503]: DEBUG nova.compute.provider_tree [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.887631] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.887837] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.888872] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ad4479-614e-4fbf-8a37-b742a8f2735e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.911781] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae68219-e7ce-4334-a26a-2fb8037ce266 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.935323] env[65503]: WARNING neutronclient.v2_0.client [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1079.940960] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Reconfiguring VM to detach interface {{(pid=65503) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1079.941398] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da5476b0-bc19-440e-8ae9-515678e53bb7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.955735] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.956089] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.956345] env[65503]: INFO nova.compute.manager [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Attaching volume 48fef821-c720-4862-96cf-43e3f851654f to /dev/sdc [ 1079.966481] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1079.966481] env[65503]: value = "task-4450650" [ 1079.966481] env[65503]: _type = "Task" [ 1079.966481] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.981173] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.992058] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3548ec3a-3b4c-427d-a1c1-ff66c69de54a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.003122] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f44e70-b66f-488a-8b9a-01aa1b9734ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.021206] env[65503]: DEBUG nova.virt.block_device [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Updating existing volume attachment record: d18da54e-7454-41f7-b386-6c9c2dd2be7f {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1080.110931] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450648, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.115653] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.195626] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Releasing lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.196129] env[65503]: DEBUG nova.compute.manager [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Received event network-changed-f8cdf0ee-e818-41cd-b8aa-b485ea737879 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1080.196334] env[65503]: DEBUG nova.compute.manager [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Refreshing instance network info cache due to event network-changed-f8cdf0ee-e818-41cd-b8aa-b485ea737879. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1080.196558] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Acquiring lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.196699] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Acquired lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1080.196961] env[65503]: DEBUG nova.network.neutron [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Refreshing network info cache for port f8cdf0ee-e818-41cd-b8aa-b485ea737879 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1080.356102] env[65503]: DEBUG nova.scheduler.client.report [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1080.479765] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.607341] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450648, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.703248] env[65503]: WARNING neutronclient.v2_0.client [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1080.704039] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1080.704502] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1080.845229] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1080.845558] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1080.862958] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.863503] env[65503]: DEBUG nova.compute.manager [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1080.866556] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.416s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.866708] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.868850] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.753s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.870384] env[65503]: INFO nova.compute.claims [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1080.906686] env[65503]: INFO nova.scheduler.client.report [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Deleted allocations for instance bee97942-afb2-465f-9774-56f5aa8becca [ 1080.923985] env[65503]: WARNING neutronclient.v2_0.client [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1080.924737] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1080.925178] env[65503]: WARNING openstack [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1080.984635] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.018516] env[65503]: DEBUG nova.network.neutron [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updated VIF entry in instance network info cache for port f8cdf0ee-e818-41cd-b8aa-b485ea737879. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1081.019097] env[65503]: DEBUG nova.network.neutron [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance_info_cache with network_info: [{"id": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "address": "fa:16:3e:57:76:3d", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cdf0ee-e8", "ovs_interfaceid": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1081.105707] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450648, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.827167} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.106089] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/5b2e951c-3e9a-4d3c-b99d-569f5b89e872/5b2e951c-3e9a-4d3c-b99d-569f5b89e872.vmdk to [datastore1] 7802baf8-04ff-4df4-90b0-71cb97dddc83/7802baf8-04ff-4df4-90b0-71cb97dddc83.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1081.106961] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c9a303-c536-432a-badf-c2317c44236e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.129750] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 7802baf8-04ff-4df4-90b0-71cb97dddc83/7802baf8-04ff-4df4-90b0-71cb97dddc83.vmdk or device None with type streamOptimized {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1081.130125] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a57a0c7b-1cb3-4816-8fb9-70e5c12998b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.150392] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1081.150392] env[65503]: value = "task-4450654" [ 1081.150392] env[65503]: _type = "Task" [ 1081.150392] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.162081] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450654, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.375571] env[65503]: DEBUG nova.compute.utils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1081.379970] env[65503]: DEBUG nova.compute.manager [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1081.379970] env[65503]: DEBUG nova.network.neutron [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1081.379970] env[65503]: WARNING neutronclient.v2_0.client [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1081.380578] env[65503]: WARNING neutronclient.v2_0.client [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1081.380993] env[65503]: WARNING openstack [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1081.381404] env[65503]: WARNING openstack [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1081.416745] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6c95911c-afe4-4c0c-8dd9-37fa0439393f tempest-DeleteServersTestJSON-142789765 tempest-DeleteServersTestJSON-142789765-project-member] Lock "bee97942-afb2-465f-9774-56f5aa8becca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.530s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.439775] env[65503]: DEBUG nova.policy [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9955668c2464ddfb0eae34aa700ddd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '521d40776571452e85178972f97c8622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1081.478383] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} 
progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.521833] env[65503]: DEBUG oslo_concurrency.lockutils [req-5d5b3d8b-e9a8-4a07-80f0-2ca7b91d1534 req-69e55413-3dec-4d73-8142-1ecdd322eaec service nova] Releasing lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.661352] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450654, 'name': ReconfigVM_Task, 'duration_secs': 0.324047} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.661743] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 7802baf8-04ff-4df4-90b0-71cb97dddc83/7802baf8-04ff-4df4-90b0-71cb97dddc83.vmdk or device None with type streamOptimized {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1081.662311] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6f837d6b-f199-4d19-b521-fc0c99f4ffc0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.669611] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1081.669611] env[65503]: value = "task-4450655" [ 1081.669611] env[65503]: _type = "Task" [ 1081.669611] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.678983] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450655, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.759892] env[65503]: DEBUG nova.network.neutron [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Successfully created port: 0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1081.880300] env[65503]: DEBUG nova.compute.manager [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1081.982224] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.113804] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bfc05f-a570-46d6-a577-194d2fadfb44 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.126163] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6016fc15-6d9b-4167-a084-44cb13866f4c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.166742] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbbb4b4-f4e0-4af4-b1dc-1e5594bd6fa7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.178694] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ecefc5-5ec5-4ad5-bdc0-c2ee7aa3f07e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.186413] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450655, 'name': Rename_Task, 'duration_secs': 0.149002} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.187244] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1082.187638] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4be3b20-d376-4a6e-b31d-ad918b40ac73 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.198102] env[65503]: DEBUG nova.compute.provider_tree [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.208964] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1082.208964] env[65503]: value = "task-4450656" [ 1082.208964] env[65503]: _type = "Task" [ 1082.208964] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.219193] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450656, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.479847] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.703666] env[65503]: DEBUG nova.scheduler.client.report [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.720028] env[65503]: DEBUG oslo_vmware.api [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450656, 'name': PowerOnVM_Task, 'duration_secs': 0.488813} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.720360] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1082.720555] env[65503]: INFO nova.compute.manager [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Took 14.86 seconds to spawn the instance on the hypervisor. [ 1082.720727] env[65503]: DEBUG nova.compute.manager [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1082.721610] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb25be3-0ecc-4d6f-be50-e524a489a7e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.891377] env[65503]: DEBUG nova.compute.manager [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1082.918365] env[65503]: DEBUG nova.virt.hardware [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1082.918593] env[65503]: DEBUG nova.virt.hardware [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1082.918745] env[65503]: DEBUG nova.virt.hardware [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1082.918923] env[65503]: DEBUG nova.virt.hardware [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1082.919076] env[65503]: DEBUG nova.virt.hardware [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1082.919224] env[65503]: DEBUG nova.virt.hardware [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1082.919478] env[65503]: DEBUG nova.virt.hardware [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1082.919640] env[65503]: DEBUG nova.virt.hardware [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1082.919804] env[65503]: DEBUG 
nova.virt.hardware [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1082.919962] env[65503]: DEBUG nova.virt.hardware [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1082.920144] env[65503]: DEBUG nova.virt.hardware [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1082.921135] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255d7884-543d-4da0-8281-a5663c46a71b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.929650] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c464504d-362d-41ef-9ad4-e782e49b40bc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.979571] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.171088] env[65503]: DEBUG nova.compute.manager [req-4f630535-4229-4426-9d39-2987d53011a3 req-fafe741f-dd21-464e-9ee3-9031ab194615 service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Received event network-vif-plugged-0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1083.171312] env[65503]: DEBUG oslo_concurrency.lockutils [req-4f630535-4229-4426-9d39-2987d53011a3 req-fafe741f-dd21-464e-9ee3-9031ab194615 service nova] Acquiring lock "eb18fc5c-168b-4442-af66-15e255ecc535-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.171526] env[65503]: DEBUG oslo_concurrency.lockutils [req-4f630535-4229-4426-9d39-2987d53011a3 req-fafe741f-dd21-464e-9ee3-9031ab194615 service nova] Lock "eb18fc5c-168b-4442-af66-15e255ecc535-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.171690] env[65503]: DEBUG oslo_concurrency.lockutils [req-4f630535-4229-4426-9d39-2987d53011a3 req-fafe741f-dd21-464e-9ee3-9031ab194615 service nova] Lock "eb18fc5c-168b-4442-af66-15e255ecc535-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.171850] env[65503]: DEBUG nova.compute.manager [req-4f630535-4229-4426-9d39-2987d53011a3 req-fafe741f-dd21-464e-9ee3-9031ab194615 service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] No waiting events found dispatching network-vif-plugged-0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1083.172623] env[65503]: WARNING nova.compute.manager [req-4f630535-4229-4426-9d39-2987d53011a3 req-fafe741f-dd21-464e-9ee3-9031ab194615 service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Received unexpected event network-vif-plugged-0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80 for instance with vm_state building and task_state spawning. [ 1083.209439] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.340s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.210199] env[65503]: DEBUG nova.compute.manager [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1083.243096] env[65503]: INFO nova.compute.manager [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Took 21.71 seconds to build instance. 
[ 1083.270652] env[65503]: DEBUG nova.network.neutron [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Successfully updated port: 0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1083.483430] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.715962] env[65503]: DEBUG nova.compute.utils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1083.717403] env[65503]: DEBUG nova.compute.manager [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1083.717692] env[65503]: DEBUG nova.network.neutron [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1083.717938] env[65503]: WARNING neutronclient.v2_0.client [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1083.718280] env[65503]: WARNING neutronclient.v2_0.client [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1083.718857] env[65503]: WARNING openstack [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1083.719214] env[65503]: WARNING openstack [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1083.745396] env[65503]: DEBUG oslo_concurrency.lockutils [None req-384a87e4-fe74-453e-b9a9-5d76a8889a6a tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "7802baf8-04ff-4df4-90b0-71cb97dddc83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.218s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.774730] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "refresh_cache-eb18fc5c-168b-4442-af66-15e255ecc535" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.774997] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "refresh_cache-eb18fc5c-168b-4442-af66-15e255ecc535" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.775198] env[65503]: DEBUG nova.network.neutron [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1083.780347] env[65503]: DEBUG nova.policy [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e193d8d730e14c348b38c407f58cdc56', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34e8cd66745a40d2acebbce98050ee5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1083.982220] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} progress is 
14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.070639] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "7802baf8-04ff-4df4-90b0-71cb97dddc83" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.071026] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "7802baf8-04ff-4df4-90b0-71cb97dddc83" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.071139] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "7802baf8-04ff-4df4-90b0-71cb97dddc83-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.071353] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "7802baf8-04ff-4df4-90b0-71cb97dddc83-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.071535] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "7802baf8-04ff-4df4-90b0-71cb97dddc83-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.073899] env[65503]: INFO nova.compute.manager [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Terminating instance [ 1084.126921] env[65503]: DEBUG nova.network.neutron [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Successfully created port: 61496b79-7af0-4518-be7a-0b0c270e3eff {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1084.229361] env[65503]: DEBUG nova.compute.manager [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1084.284509] env[65503]: WARNING openstack [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1084.284985] env[65503]: WARNING openstack [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1084.324267] env[65503]: DEBUG nova.network.neutron [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1084.347691] env[65503]: WARNING openstack [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1084.347868] env[65503]: WARNING openstack [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1084.486403] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.545723] env[65503]: WARNING neutronclient.v2_0.client [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1084.546521] env[65503]: WARNING openstack [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1084.546904] env[65503]: WARNING openstack [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1084.576856] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Volume attach. Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1084.577188] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870469', 'volume_id': '48fef821-c720-4862-96cf-43e3f851654f', 'name': 'volume-48fef821-c720-4862-96cf-43e3f851654f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f89ca00e-d54e-4040-bf18-9a5ec96378d5', 'attached_at': '', 'detached_at': '', 'volume_id': '48fef821-c720-4862-96cf-43e3f851654f', 'serial': '48fef821-c720-4862-96cf-43e3f851654f'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1084.578167] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483e706d-fdb2-47b7-a62d-e84c31c76f47 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.581435] env[65503]: DEBUG nova.compute.manager [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1084.581640] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1084.582443] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566aafcf-8c22-42a7-9cf2-a1fe63056b93 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.604137] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1084.604137] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-586c5205-72c4-41b5-99b2-26caaea8cfff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.606348] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acda6ff0-8073-46ee-8b4d-5fc61b060cc1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.640483] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] volume-48fef821-c720-4862-96cf-43e3f851654f/volume-48fef821-c720-4862-96cf-43e3f851654f.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1084.647914] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-898244be-b2db-4a4f-80ba-14c98a60e3f2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.661921] env[65503]: DEBUG oslo_vmware.api [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1084.661921] env[65503]: value = "task-4450659" [ 1084.661921] env[65503]: _type = "Task" [ 1084.661921] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.668837] env[65503]: DEBUG oslo_vmware.api [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1084.668837] env[65503]: value = "task-4450660" [ 1084.668837] env[65503]: _type = "Task" [ 1084.668837] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.671869] env[65503]: DEBUG oslo_vmware.api [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450659, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.682890] env[65503]: DEBUG oslo_vmware.api [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450660, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.749758] env[65503]: DEBUG nova.network.neutron [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Updating instance_info_cache with network_info: [{"id": "0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80", "address": "fa:16:3e:74:7a:ef", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0dfdce64-2c", "ovs_interfaceid": "0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1084.982293] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.172686] env[65503]: DEBUG oslo_vmware.api [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450659, 'name': PowerOffVM_Task, 'duration_secs': 0.200972} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.175858] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1085.176084] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1085.176359] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d07bdbd4-b783-45b2-8afa-e7f205bdfc4a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.184240] env[65503]: DEBUG oslo_vmware.api [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450660, 'name': ReconfigVM_Task, 'duration_secs': 0.379976} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.184593] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Reconfigured VM instance instance-0000005b to attach disk [datastore2] volume-48fef821-c720-4862-96cf-43e3f851654f/volume-48fef821-c720-4862-96cf-43e3f851654f.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.189461] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cbe9f61-4643-4471-a47b-9860c22ae052 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.206039] env[65503]: DEBUG oslo_vmware.api [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1085.206039] env[65503]: value = "task-4450663" [ 1085.206039] env[65503]: _type = "Task" [ 1085.206039] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.211757] env[65503]: DEBUG nova.compute.manager [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Received event network-changed-0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1085.211954] env[65503]: DEBUG nova.compute.manager [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Refreshing instance network info cache due to event network-changed-0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1085.212172] env[65503]: DEBUG oslo_concurrency.lockutils [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] Acquiring lock "refresh_cache-eb18fc5c-168b-4442-af66-15e255ecc535" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.218501] env[65503]: DEBUG oslo_vmware.api [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450663, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.239338] env[65503]: DEBUG nova.compute.manager [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1085.243548] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1085.243590] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1085.243816] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleting the datastore file [datastore1] 7802baf8-04ff-4df4-90b0-71cb97dddc83 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1085.244192] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39b83939-74d0-497a-8ce6-9e89ff600e21 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.252216] env[65503]: DEBUG oslo_vmware.api [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1085.252216] env[65503]: value = "task-4450664" [ 1085.252216] env[65503]: _type = "Task" [ 1085.252216] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.252888] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "refresh_cache-eb18fc5c-168b-4442-af66-15e255ecc535" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.253498] env[65503]: DEBUG nova.compute.manager [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Instance network_info: |[{"id": "0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80", "address": "fa:16:3e:74:7a:ef", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0dfdce64-2c", "ovs_interfaceid": "0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1085.257923] env[65503]: DEBUG oslo_concurrency.lockutils [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] Acquired lock "refresh_cache-eb18fc5c-168b-4442-af66-15e255ecc535" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.258194] env[65503]: DEBUG nova.network.neutron [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Refreshing network info cache for port 0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1085.259642] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:7a:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a7ba8d0-0208-4af7-af44-2a5ad382f9be', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1085.268418] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 
tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1085.271612] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1085.273268] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7faeec8-525b-458f-ba2e-25a084510c51 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.296688] env[65503]: DEBUG nova.virt.hardware [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1085.297117] env[65503]: DEBUG nova.virt.hardware [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1085.297269] env[65503]: DEBUG nova.virt.hardware [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1085.297474] env[65503]: DEBUG nova.virt.hardware [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1085.297688] env[65503]: DEBUG nova.virt.hardware [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1085.297896] env[65503]: DEBUG nova.virt.hardware [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1085.298283] env[65503]: DEBUG nova.virt.hardware [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d 
tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1085.298458] env[65503]: DEBUG nova.virt.hardware [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1085.298684] env[65503]: DEBUG nova.virt.hardware [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1085.298884] env[65503]: DEBUG nova.virt.hardware [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1085.299145] env[65503]: DEBUG nova.virt.hardware [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1085.300308] env[65503]: DEBUG oslo_vmware.api [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450664, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.301239] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240bd426-f85d-4a69-aa75-b9c0c1833e2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.314202] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ddf02a-0a8f-47c2-b62c-cc59e8b29e6e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.319353] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1085.319353] env[65503]: value = "task-4450665" [ 1085.319353] env[65503]: _type = "Task" [ 1085.319353] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.341667] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450665, 'name': CreateVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.485743] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.603872] env[65503]: DEBUG nova.compute.manager [req-a75001ec-062e-4f64-9354-43f00a4356c5 req-2bc5e41b-3ecc-42b8-a5e0-cf746a5aca29 service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Received event network-vif-plugged-61496b79-7af0-4518-be7a-0b0c270e3eff {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1085.604193] env[65503]: DEBUG oslo_concurrency.lockutils [req-a75001ec-062e-4f64-9354-43f00a4356c5 req-2bc5e41b-3ecc-42b8-a5e0-cf746a5aca29 service nova] Acquiring lock "bc700871-233c-4aa0-867e-4f166b6f54d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.605288] env[65503]: DEBUG oslo_concurrency.lockutils [req-a75001ec-062e-4f64-9354-43f00a4356c5 req-2bc5e41b-3ecc-42b8-a5e0-cf746a5aca29 service nova] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.605567] env[65503]: DEBUG oslo_concurrency.lockutils [req-a75001ec-062e-4f64-9354-43f00a4356c5 req-2bc5e41b-3ecc-42b8-a5e0-cf746a5aca29 service nova] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.606068] env[65503]: DEBUG nova.compute.manager [req-a75001ec-062e-4f64-9354-43f00a4356c5 req-2bc5e41b-3ecc-42b8-a5e0-cf746a5aca29 service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] No waiting events found dispatching network-vif-plugged-61496b79-7af0-4518-be7a-0b0c270e3eff {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1085.606068] env[65503]: WARNING nova.compute.manager [req-a75001ec-062e-4f64-9354-43f00a4356c5 req-2bc5e41b-3ecc-42b8-a5e0-cf746a5aca29 service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Received unexpected event network-vif-plugged-61496b79-7af0-4518-be7a-0b0c270e3eff for instance with vm_state building and task_state spawning. [ 1085.715956] env[65503]: DEBUG nova.network.neutron [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Successfully updated port: 61496b79-7af0-4518-be7a-0b0c270e3eff {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1085.723980] env[65503]: DEBUG oslo_vmware.api [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450663, 'name': ReconfigVM_Task, 'duration_secs': 0.168397} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.724681] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870469', 'volume_id': '48fef821-c720-4862-96cf-43e3f851654f', 'name': 'volume-48fef821-c720-4862-96cf-43e3f851654f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f89ca00e-d54e-4040-bf18-9a5ec96378d5', 'attached_at': '', 'detached_at': '', 'volume_id': '48fef821-c720-4862-96cf-43e3f851654f', 'serial': '48fef821-c720-4862-96cf-43e3f851654f'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1085.765967] env[65503]: DEBUG oslo_vmware.api [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176785} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.766287] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1085.766445] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1085.766614] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1085.766786] env[65503]: INFO nova.compute.manager [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1085.767047] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1085.767287] env[65503]: DEBUG nova.compute.manager [-] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1085.767386] env[65503]: DEBUG nova.network.neutron [-] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1085.767635] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1085.768228] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1085.768571] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1085.775870] env[65503]: WARNING neutronclient.v2_0.client [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1085.776514] env[65503]: WARNING openstack [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1085.776864] env[65503]: WARNING openstack [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1085.808996] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1085.831241] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450665, 'name': CreateVM_Task, 'duration_secs': 0.386396} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.831446] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1085.832049] env[65503]: WARNING neutronclient.v2_0.client [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1085.832370] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.832520] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.832891] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1085.833200] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b32700c-acd0-4355-93d4-31d91a5b4704 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.839066] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1085.839066] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527099ff-e0dc-571f-305c-c033a05885a8" [ 1085.839066] env[65503]: _type = "Task" [ 1085.839066] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.849048] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527099ff-e0dc-571f-305c-c033a05885a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.912763] env[65503]: WARNING openstack [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1085.913251] env[65503]: WARNING openstack [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1085.922901] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.923182] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.923391] env[65503]: DEBUG nova.objects.instance [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lazy-loading 'pci_requests' on Instance uuid b00a98a4-4865-4a02-a353-3d1da9ef0e51 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.983764] env[65503]: DEBUG oslo_vmware.api [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450650, 'name': ReconfigVM_Task, 'duration_secs': 5.810234} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.988952] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.989473] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Reconfigured VM to detach interface {{(pid=65503) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1085.989604] env[65503]: WARNING neutronclient.v2_0.client [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1085.989902] env[65503]: WARNING neutronclient.v2_0.client [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1085.990482] env[65503]: WARNING openstack [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1085.990802] env[65503]: WARNING openstack [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1086.033888] env[65503]: WARNING neutronclient.v2_0.client [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1086.036789] env[65503]: WARNING neutronclient.v2_0.client [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1086.037415] env[65503]: WARNING openstack [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1086.037813] env[65503]: WARNING openstack [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1086.121730] env[65503]: DEBUG nova.network.neutron [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Updated VIF entry in instance network info cache for port 0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1086.122156] env[65503]: DEBUG nova.network.neutron [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Updating instance_info_cache with network_info: [{"id": "0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80", "address": "fa:16:3e:74:7a:ef", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0dfdce64-2c", "ovs_interfaceid": "0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1086.226369] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "refresh_cache-bc700871-233c-4aa0-867e-4f166b6f54d1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.226369] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "refresh_cache-bc700871-233c-4aa0-867e-4f166b6f54d1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.226369] env[65503]: DEBUG 
nova.network.neutron [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1086.351673] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527099ff-e0dc-571f-305c-c033a05885a8, 'name': SearchDatastore_Task, 'duration_secs': 0.010236} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.352018] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.352266] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1086.352497] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.352637] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.352808] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1086.353107] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bab7e486-e430-44b5-b7d4-09bc2e926d1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.362901] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1086.363118] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None 
req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1086.363887] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e6070f5-e68b-4725-92ca-ed8609f3e33d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.370146] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1086.370146] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529590f0-c800-8d18-a05d-f607bc1f7483" [ 1086.370146] env[65503]: _type = "Task" [ 1086.370146] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.379960] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529590f0-c800-8d18-a05d-f607bc1f7483, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.427456] env[65503]: DEBUG nova.objects.instance [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lazy-loading 'numa_topology' on Instance uuid b00a98a4-4865-4a02-a353-3d1da9ef0e51 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.506312] env[65503]: DEBUG nova.network.neutron [-] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1086.625577] env[65503]: DEBUG oslo_concurrency.lockutils [req-cdf87605-94d7-4b47-81c9-7c10735b8346 req-5020d2f3-9115-4385-a4b8-a28973c1a6b9 service nova] Releasing lock "refresh_cache-eb18fc5c-168b-4442-af66-15e255ecc535" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.731182] env[65503]: WARNING openstack [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1086.731605] env[65503]: WARNING openstack [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1086.779625] env[65503]: DEBUG nova.network.neutron [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d 
tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1086.784052] env[65503]: DEBUG nova.objects.instance [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'flavor' on Instance uuid f89ca00e-d54e-4040-bf18-9a5ec96378d5 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.812192] env[65503]: WARNING openstack [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1086.812192] env[65503]: WARNING openstack [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1086.882189] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529590f0-c800-8d18-a05d-f607bc1f7483, 'name': SearchDatastore_Task, 'duration_secs': 0.009243} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.883053] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75a8cd05-9ea7-4994-92c7-aee34193b93a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.894796] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1086.894796] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a230c2-665b-fa9d-cc19-5fd9477fbee2" [ 1086.894796] env[65503]: _type = "Task" [ 1086.894796] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.904892] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a230c2-665b-fa9d-cc19-5fd9477fbee2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.925353] env[65503]: WARNING neutronclient.v2_0.client [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1086.926179] env[65503]: WARNING openstack [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1086.926423] env[65503]: WARNING openstack [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1086.934634] env[65503]: INFO nova.compute.claims [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1087.009049] env[65503]: INFO nova.compute.manager [-] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Took 1.24 seconds to deallocate network for instance. 
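Editor's note: the SearchDatastore_Task entries above follow the same rhythm as every vCenter call in this log: oslo.vmware issues the SOAP request, gets back a task reference, and then polls it ("Waiting for the task ... progress is 0% ... completed successfully"). The snippet below is only a minimal plain-Python sketch of that poll-until-terminal pattern; fetch_task_state and the state names are illustrative stand-ins, not the real oslo.vmware wait_for_task API.

import time

def wait_for_task(fetch_task_state, poll_interval=0.5, timeout=300):
    # fetch_task_state() is a hypothetical callable returning (state, progress);
    # in the real driver the state comes from the vCenter TaskInfo object.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = fetch_task_state()
        print(f"progress is {progress}%")      # analogous to the DEBUG lines above
        if state == "success":
            print("completed successfully.")
            return
        if state == "error":
            raise RuntimeError("task reported an error")
        time.sleep(poll_interval)              # wait before polling again
    raise TimeoutError("task did not complete before the timeout")
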
[ 1087.024078] env[65503]: DEBUG nova.network.neutron [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Updating instance_info_cache with network_info: [{"id": "61496b79-7af0-4518-be7a-0b0c270e3eff", "address": "fa:16:3e:5e:cf:59", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61496b79-7a", "ovs_interfaceid": "61496b79-7af0-4518-be7a-0b0c270e3eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1087.239083] env[65503]: DEBUG nova.compute.manager [req-ff53167c-be1c-4543-bf01-87b8d735e5c3 req-3851c875-14a3-4e60-b3fc-bf0c94cc3243 service nova] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Received event network-vif-deleted-099eaaaa-f2b2-44dc-98c7-be6801506e99 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1087.289925] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db19738b-8b64-462a-9a4b-354d82cea972 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.334s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.337399] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.337749] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.337781] env[65503]: DEBUG nova.network.neutron [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1087.406522] 
env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a230c2-665b-fa9d-cc19-5fd9477fbee2, 'name': SearchDatastore_Task, 'duration_secs': 0.01092} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.406802] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.407086] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] eb18fc5c-168b-4442-af66-15e255ecc535/eb18fc5c-168b-4442-af66-15e255ecc535.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1087.407368] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e13f3d6-c4cc-4128-bc01-cefeb5a91930 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.414900] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1087.414900] env[65503]: value = "task-4450666" [ 1087.414900] env[65503]: _type = "Task" [ 1087.414900] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.424492] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450666, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.515936] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.527584] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "refresh_cache-bc700871-233c-4aa0-867e-4f166b6f54d1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.527991] env[65503]: DEBUG nova.compute.manager [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Instance network_info: |[{"id": "61496b79-7af0-4518-be7a-0b0c270e3eff", "address": "fa:16:3e:5e:cf:59", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61496b79-7a", "ovs_interfaceid": "61496b79-7af0-4518-be7a-0b0c270e3eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1087.528515] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:cf:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61496b79-7af0-4518-be7a-0b0c270e3eff', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1087.536288] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1087.536534] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1087.536775] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a10ad17d-0234-4643-a242-250ecf19b92c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.560535] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1087.560535] env[65503]: value = "task-4450667" [ 1087.560535] env[65503]: _type = "Task" [ 1087.560535] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.569886] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450667, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.592383] env[65503]: DEBUG oslo_concurrency.lockutils [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.592657] env[65503]: DEBUG oslo_concurrency.lockutils [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.642679] env[65503]: DEBUG nova.compute.manager [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Received event network-changed-61496b79-7af0-4518-be7a-0b0c270e3eff {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1087.642909] env[65503]: DEBUG nova.compute.manager [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Refreshing instance network info cache due to event network-changed-61496b79-7af0-4518-be7a-0b0c270e3eff. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1087.643154] env[65503]: DEBUG oslo_concurrency.lockutils [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] Acquiring lock "refresh_cache-bc700871-233c-4aa0-867e-4f166b6f54d1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.643525] env[65503]: DEBUG oslo_concurrency.lockutils [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] Acquired lock "refresh_cache-bc700871-233c-4aa0-867e-4f166b6f54d1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.643525] env[65503]: DEBUG nova.network.neutron [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Refreshing network info cache for port 61496b79-7af0-4518-be7a-0b0c270e3eff {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1087.840959] env[65503]: WARNING neutronclient.v2_0.client [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1087.841765] env[65503]: WARNING openstack [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1087.842138] env[65503]: WARNING openstack [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1087.927035] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450666, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474563} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.927332] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] eb18fc5c-168b-4442-af66-15e255ecc535/eb18fc5c-168b-4442-af66-15e255ecc535.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1087.927549] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1087.927815] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2320f3c9-8a02-4fda-8670-514739cbca32 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.938146] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1087.938146] env[65503]: value = "task-4450668" [ 1087.938146] env[65503]: _type = "Task" [ 1087.938146] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.954960] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450668, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.073086] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450667, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.098472] env[65503]: INFO nova.compute.manager [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Detaching volume 09387433-6551-4123-9495-cdd30655e030 [ 1088.140891] env[65503]: INFO nova.virt.block_device [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Attempting to driver detach volume 09387433-6551-4123-9495-cdd30655e030 from mountpoint /dev/sdb [ 1088.141150] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Volume detach. 
Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1088.141341] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870464', 'volume_id': '09387433-6551-4123-9495-cdd30655e030', 'name': 'volume-09387433-6551-4123-9495-cdd30655e030', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f89ca00e-d54e-4040-bf18-9a5ec96378d5', 'attached_at': '', 'detached_at': '', 'volume_id': '09387433-6551-4123-9495-cdd30655e030', 'serial': '09387433-6551-4123-9495-cdd30655e030'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1088.142224] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90a0a72-af7b-4add-bca3-57ee0401adbd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.147484] env[65503]: WARNING neutronclient.v2_0.client [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1088.147770] env[65503]: WARNING openstack [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1088.148470] env[65503]: WARNING openstack [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1088.182272] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b52a51-8442-4d7a-bcda-4576655f5a09 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.190938] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e1d035-8449-4670-986e-494ce83425a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.222042] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2336e4c-4161-454c-b2ed-04dd0260f107 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.225062] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a32578-3763-42da-95de-dedf5cbd48c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.248616] env[65503]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7044ca2a-747d-47bb-bf3d-756cbe36f5ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.252567] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The volume has not been displaced from its original location: [datastore1] volume-09387433-6551-4123-9495-cdd30655e030/volume-09387433-6551-4123-9495-cdd30655e030.vmdk. No consolidation needed. {{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1088.258218] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Reconfiguring VM instance instance-0000005b to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1088.258273] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e5750cc-7ed6-4f3f-838d-41222dc0dd97 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.302342] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea7844e-8ac0-49d9-a830-47655b7a48b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.305450] env[65503]: DEBUG oslo_vmware.api [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1088.305450] env[65503]: value = "task-4450669" [ 1088.305450] env[65503]: _type = "Task" [ 1088.305450] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.317263] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d29f37f-e2c8-403a-8a46-d0f3a05cdf88 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.325595] env[65503]: DEBUG oslo_vmware.api [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450669, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.336997] env[65503]: DEBUG nova.compute.provider_tree [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.340013] env[65503]: WARNING openstack [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1088.340365] env[65503]: WARNING openstack [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1088.455403] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077691} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.455736] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1088.456577] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65efa567-452e-460b-8044-994de4354788 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.479736] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] eb18fc5c-168b-4442-af66-15e255ecc535/eb18fc5c-168b-4442-af66-15e255ecc535.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1088.480423] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdd0e7e3-efe9-4eaf-b6e0-c9b77483d510 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.497753] env[65503]: WARNING openstack [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 
'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1088.498457] env[65503]: WARNING openstack [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1088.513887] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1088.513887] env[65503]: value = "task-4450670" [ 1088.513887] env[65503]: _type = "Task" [ 1088.513887] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.523998] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450670, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.549176] env[65503]: WARNING neutronclient.v2_0.client [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1088.549908] env[65503]: WARNING openstack [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1088.550521] env[65503]: WARNING openstack [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1088.574268] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450667, 'name': CreateVM_Task, 'duration_secs': 0.553418} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.574268] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1088.574692] env[65503]: WARNING neutronclient.v2_0.client [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
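Editor's note: the instance_info_cache entries above (for example the one logged at [ 1087.024078] for instance bc700871-233c-4aa0-867e-4f166b6f54d1) show the full VIF model Nova caches per port: port id, MAC address, subnets with fixed IPs, and the OVS/NSX binding details. As a reading aid, the sketch below walks a structure with that shape and pulls out the MAC and fixed IPs; the nested fields are copied (trimmed) from the log entry itself, while the fixed_ips helper is ours, not a Nova API.

# Shape copied (trimmed to the fields used here) from the instance_info_cache
# entry logged above for instance bc700871-233c-4aa0-867e-4f166b6f54d1.
network_info = [{
    "id": "61496b79-7af0-4518-be7a-0b0c270e3eff",
    "address": "fa:16:3e:5e:cf:59",
    "network": {
        "id": "5e066471-434b-4bf0-a5f6-d57f179d43c1",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.8", "type": "fixed"}],
        }],
    },
    "devname": "tap61496b79-7a",
}]

def fixed_ips(nw_info):
    """Yield (mac, ip, cidr) tuples from a cached network_info list."""
    for vif in nw_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                if ip["type"] == "fixed":
                    yield vif["address"], ip["address"], subnet["cidr"]

print(list(fixed_ips(network_info)))
# [('fa:16:3e:5e:cf:59', '192.168.128.8', '192.168.128.0/28')]
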
[ 1088.575823] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.575823] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.575823] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1088.575993] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f1472b1-6ff5-4fce-bef0-5579bc0dd7cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.581672] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1088.581672] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e7eb37-882d-237b-9952-fb1a0d0498f2" [ 1088.581672] env[65503]: _type = "Task" [ 1088.581672] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.592232] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e7eb37-882d-237b-9952-fb1a0d0498f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.693817] env[65503]: WARNING neutronclient.v2_0.client [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
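Editor's note: the Acquiring/Acquired/Releasing lock triplets above, around "refresh_cache-<uuid>" names and datastore paths such as "[datastore2] devstack-image-cache_base/...", are oslo.concurrency named locks: every worker that touches the same cache entry serializes on the same lock name, and the "Acquired external semaphore" line adds a further external layer for the image cache. Below is a minimal sketch of the named-lock pattern, assuming the lockutils.lock() context manager; Nova's actual lock prefixes, fair-lock settings, and semaphore handling are not reproduced.

from oslo_concurrency import lockutils

# Lock name mirrors the datastore path used in the log; the function and its
# fetch_image argument are illustrative, not Nova code.
IMAGE_CACHE_LOCK = "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93"

def refresh_image_cache_entry(fetch_image):
    # Serializes with any other thread/greenthread using the same lock name,
    # producing Acquiring/Acquired/Releasing debug lines like those above.
    with lockutils.lock(IMAGE_CACHE_LOCK):
        return fetch_image()
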
[ 1088.694480] env[65503]: WARNING openstack [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1088.694863] env[65503]: WARNING openstack [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1088.725145] env[65503]: INFO nova.network.neutron [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Port 884998aa-5fa4-410b-87e8-843ac2653ab7 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1088.725516] env[65503]: DEBUG nova.network.neutron [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updating instance_info_cache with network_info: [{"id": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "address": "fa:16:3e:2b:7c:50", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a654cb-c4", "ovs_interfaceid": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1088.805546] env[65503]: DEBUG nova.network.neutron [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Updated VIF entry in instance network info cache for port 61496b79-7af0-4518-be7a-0b0c270e3eff. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1088.805937] env[65503]: DEBUG nova.network.neutron [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Updating instance_info_cache with network_info: [{"id": "61496b79-7af0-4518-be7a-0b0c270e3eff", "address": "fa:16:3e:5e:cf:59", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61496b79-7a", "ovs_interfaceid": "61496b79-7af0-4518-be7a-0b0c270e3eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1088.817312] env[65503]: DEBUG oslo_vmware.api [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450669, 'name': ReconfigVM_Task, 'duration_secs': 0.288516} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.818187] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Reconfigured VM instance instance-0000005b to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1088.823026] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b3761b7-16a8-44a5-bb73-0dd1b309c242 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.840692] env[65503]: DEBUG oslo_vmware.api [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1088.840692] env[65503]: value = "task-4450671" [ 1088.840692] env[65503]: _type = "Task" [ 1088.840692] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.850852] env[65503]: DEBUG nova.scheduler.client.report [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1088.854512] env[65503]: DEBUG oslo_vmware.api [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450671, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.026408] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450670, 'name': ReconfigVM_Task, 'duration_secs': 0.301275} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.026764] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Reconfigured VM instance instance-00000066 to attach disk [datastore2] eb18fc5c-168b-4442-af66-15e255ecc535/eb18fc5c-168b-4442-af66-15e255ecc535.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1089.028031] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d49c4ee-510c-475d-b17f-b143900a7588 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.036102] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1089.036102] env[65503]: value = "task-4450672" [ 1089.036102] env[65503]: _type = "Task" [ 1089.036102] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.045787] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450672, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.095107] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e7eb37-882d-237b-9952-fb1a0d0498f2, 'name': SearchDatastore_Task, 'duration_secs': 0.01255} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.095455] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.095691] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1089.095931] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.096081] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.096406] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.096704] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a9adfdc-e065-4b22-9864-99c3fe776ac2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.104147] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.104778] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 
tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.110629] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.110841] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1089.111704] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcdab193-7af9-450a-98ec-d9567469d4bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.121425] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1089.121425] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52488ee1-7686-97be-06a8-77232316abf0" [ 1089.121425] env[65503]: _type = "Task" [ 1089.121425] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.131743] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52488ee1-7686-97be-06a8-77232316abf0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.175633] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "interface-f71dca10-3b68-4f1e-868e-a8c8271f7c88-884998aa-5fa4-410b-87e8-843ac2653ab7" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.176030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-f71dca10-3b68-4f1e-868e-a8c8271f7c88-884998aa-5fa4-410b-87e8-843ac2653ab7" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.176408] env[65503]: DEBUG nova.objects.instance [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'flavor' on Instance uuid f71dca10-3b68-4f1e-868e-a8c8271f7c88 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.228955] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.309601] env[65503]: DEBUG oslo_concurrency.lockutils [req-549e99a7-3a4d-47c7-ae82-0235e26492ee req-22f06ed6-3ce0-469c-b876-c560e43ac3ef service nova] Releasing lock "refresh_cache-bc700871-233c-4aa0-867e-4f166b6f54d1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.351990] env[65503]: DEBUG oslo_vmware.api [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450671, 'name': ReconfigVM_Task, 'duration_secs': 0.154685} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.352352] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870464', 'volume_id': '09387433-6551-4123-9495-cdd30655e030', 'name': 'volume-09387433-6551-4123-9495-cdd30655e030', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f89ca00e-d54e-4040-bf18-9a5ec96378d5', 'attached_at': '', 'detached_at': '', 'volume_id': '09387433-6551-4123-9495-cdd30655e030', 'serial': '09387433-6551-4123-9495-cdd30655e030'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1089.356554] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.433s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.357265] env[65503]: WARNING neutronclient.v2_0.client [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1089.359625] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.844s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.359877] env[65503]: DEBUG nova.objects.instance [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lazy-loading 'resources' on Instance uuid 7802baf8-04ff-4df4-90b0-71cb97dddc83 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.394025] env[65503]: INFO nova.network.neutron [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updating port 96370c0c-da2e-4229-82a1-a24b799e6402 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1089.550911] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450672, 'name': Rename_Task, 'duration_secs': 0.14708} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.551256] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1089.551518] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ad564dc-d523-4d99-9661-6a116581012d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.562139] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1089.562139] env[65503]: value = "task-4450673" [ 1089.562139] env[65503]: _type = "Task" [ 1089.562139] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.571960] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450673, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.607728] env[65503]: DEBUG nova.compute.utils [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1089.637694] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52488ee1-7686-97be-06a8-77232316abf0, 'name': SearchDatastore_Task, 'duration_secs': 0.01141} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.639030] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aefb509-6b26-45c6-ae53-c28442bedcf2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.646651] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1089.646651] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b45a1f-8db3-7475-5d7b-9b816c0aaf71" [ 1089.646651] env[65503]: _type = "Task" [ 1089.646651] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.656745] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b45a1f-8db3-7475-5d7b-9b816c0aaf71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.680099] env[65503]: WARNING neutronclient.v2_0.client [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1089.680975] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1089.681265] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1089.733231] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f2c8f891-e25a-4060-8c75-e14cf594990c tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-1d1a96cc-63b3-472c-b94a-1ea00763f770-884998aa-5fa4-410b-87e8-843ac2653ab7" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.351s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.874683] env[65503]: DEBUG nova.objects.instance [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'pci_requests' on Instance uuid f71dca10-3b68-4f1e-868e-a8c8271f7c88 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.906998] env[65503]: DEBUG nova.objects.instance [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'flavor' on Instance uuid f89ca00e-d54e-4040-bf18-9a5ec96378d5 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1090.040899] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e826320-a226-45cf-8d60-e2291d55fb88 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.055607] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aad9e66-4444-40ab-84d2-c639754df1d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.091638] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b41575-45f1-4a86-b087-1c5171878c1b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.097816] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 
tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450673, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.103602] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b872f65-8e0a-4a4c-a965-cbc60ea321ac {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.110615] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.119614] env[65503]: DEBUG nova.compute.provider_tree [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.157640] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b45a1f-8db3-7475-5d7b-9b816c0aaf71, 'name': SearchDatastore_Task, 'duration_secs': 0.033937} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.157961] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.158284] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] bc700871-233c-4aa0-867e-4f166b6f54d1/bc700871-233c-4aa0-867e-4f166b6f54d1.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1090.158574] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2d1d705-da9a-4788-9bdd-85587d9c31a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.228228] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1090.228228] env[65503]: value = "task-4450674" [ 1090.228228] env[65503]: _type = "Task" [ 1090.228228] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.239295] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450674, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.378321] env[65503]: DEBUG nova.objects.base [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1090.378554] env[65503]: DEBUG nova.network.neutron [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1090.378913] env[65503]: WARNING neutronclient.v2_0.client [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1090.379264] env[65503]: WARNING neutronclient.v2_0.client [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1090.379903] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1090.380365] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1090.573289] env[65503]: DEBUG oslo_vmware.api [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450673, 'name': PowerOnVM_Task, 'duration_secs': 0.769476} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.573588] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1090.573781] env[65503]: INFO nova.compute.manager [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Took 7.68 seconds to spawn the instance on the hypervisor. [ 1090.573957] env[65503]: DEBUG nova.compute.manager [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1090.574802] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24e36db-76e0-40a9-b63f-a54db875883e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.623510] env[65503]: DEBUG nova.scheduler.client.report [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1090.726028] env[65503]: DEBUG nova.compute.manager [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Received event network-changed-b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1090.726360] env[65503]: DEBUG nova.compute.manager [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing instance network info cache due to event network-changed-b5a654cb-c44f-45fc-bf0c-429ff06916f0. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1090.726573] env[65503]: DEBUG oslo_concurrency.lockutils [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Acquiring lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.726903] env[65503]: DEBUG oslo_concurrency.lockutils [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Acquired lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.726903] env[65503]: DEBUG nova.network.neutron [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Refreshing network info cache for port b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1090.743927] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450674, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.763619] env[65503]: DEBUG nova.policy [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b617c5c7508403f9bef0d6b436f541d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be67f50c5bc447309d4c04f3f2805455', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1090.913783] env[65503]: DEBUG oslo_concurrency.lockutils [None req-54f3f931-bec3-40fc-bc7b-6e69c0f2708b tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.321s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.964043] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.964161] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.964250] env[65503]: DEBUG nova.network.neutron [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f 
tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1091.094321] env[65503]: INFO nova.compute.manager [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Took 12.69 seconds to build instance. [ 1091.128736] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.769s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.151263] env[65503]: INFO nova.scheduler.client.report [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleted allocations for instance 7802baf8-04ff-4df4-90b0-71cb97dddc83 [ 1091.203843] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.204170] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.204454] env[65503]: INFO nova.compute.manager [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Attaching volume 1c9d3e4e-c23c-460d-827c-3a19ff329147 to /dev/sdb [ 1091.234496] env[65503]: WARNING neutronclient.v2_0.client [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
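The "Acquiring lock ... by ...", "acquired ... waited N.NNNs" and '"released" ... held N.NNNs' entries throughout this log come from oslo.concurrency's lockutils wrapper around the decorated compute-manager methods. A minimal sketch of that pattern, assuming a hypothetical do_attach_volume_demo() function rather than Nova's actual manager code:

```python
# Minimal sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ..." / "Lock ... acquired ..." / '"released" ... held' lines.
# The lock name and do_attach_volume_demo() are illustrative placeholders.
from oslo_concurrency import lockutils


@lockutils.synchronized('b7f55645-f152-4fc9-a962-393f9a9b9c55')
def do_attach_volume_demo():
    # The decorator serializes callers on the named lock and logs how long
    # each caller waited for the lock and then held it, which is what the
    # timestamped lockutils entries above record.
    pass


if __name__ == '__main__':
    do_attach_volume_demo()
```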
[ 1091.235216] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1091.235493] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1091.252765] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450674, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.686566} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.252765] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] bc700871-233c-4aa0-867e-4f166b6f54d1/bc700871-233c-4aa0-867e-4f166b6f54d1.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1091.252765] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1091.252765] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-397bdab5-fdbe-41f4-8060-19e5b3242b60 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.257174] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c005bd96-acdd-47b8-a9d6-048d366a4620 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.268021] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67f5bf2-84de-4f60-95bf-4a8d0d9162a3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.270790] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1091.270790] env[65503]: value = "task-4450675" [ 1091.270790] env[65503]: _type = "Task" [ 1091.270790] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.281410] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450675, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.286341] env[65503]: DEBUG nova.virt.block_device [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updating existing volume attachment record: ca91dd35-4122-44b4-b235-9e8938296663 {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1091.467877] env[65503]: WARNING neutronclient.v2_0.client [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1091.468639] env[65503]: WARNING openstack [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1091.468984] env[65503]: WARNING openstack [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1091.596555] env[65503]: DEBUG oslo_concurrency.lockutils [None req-037d1ff8-b155-4a26-bbef-fc487f680283 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "eb18fc5c-168b-4442-af66-15e255ecc535" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.208s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.665612] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d0836731-663f-4ca2-8a7d-0541913541a4 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "7802baf8-04ff-4df4-90b0-71cb97dddc83" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.595s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.783094] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450675, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078549} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.783094] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.783094] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cb2beb-5efc-49a9-a390-3e876b16982e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.808500] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] bc700871-233c-4aa0-867e-4f166b6f54d1/bc700871-233c-4aa0-867e-4f166b6f54d1.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.808823] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-216bee8e-13f7-487e-abf0-d522f6914950 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.832612] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1091.832612] env[65503]: value = "task-4450679" [ 1091.832612] env[65503]: _type = "Task" [ 1091.832612] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.843825] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450679, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.348477] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450679, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.498671] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1092.499082] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1092.556537] env[65503]: WARNING openstack [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1092.556962] env[65503]: WARNING openstack [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1092.614361] env[65503]: DEBUG nova.network.neutron [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Successfully updated port: 884998aa-5fa4-410b-87e8-843ac2653ab7 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1092.635725] env[65503]: WARNING neutronclient.v2_0.client [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
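The repeated "Invoking VirtualMachine.<X>_Task", "Waiting for the task: (returnval){ ... }" and "Task: {...} progress is N%" entries reflect oslo.vmware's call-and-poll handling of vCenter task objects. A minimal, hedged sketch of that pattern (not Nova's actual driver code); the session argument is assumed to be an oslo_vmware.api.VMwareAPISession and vm_ref a VM managed object reference:

```python
# Hedged sketch of the oslo.vmware call-and-poll pattern seen above.
from oslo_vmware import api  # noqa: F401  (VMwareAPISession provides invoke_api/wait_for_task)


def power_on_vm(session, vm_ref):
    """Power on a VM and block until vCenter reports the task finished.

    session: an oslo_vmware.api.VMwareAPISession
    vm_ref:  the managed object reference of the virtual machine
    """
    # Every vCenter "*_Task" method returns a task moref immediately; the
    # session then polls it (producing the "progress is N%" log lines)
    # until the task reaches its success or error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)
```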
[ 1092.636573] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1092.636945] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1092.663733] env[65503]: WARNING neutronclient.v2_0.client [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1092.664542] env[65503]: WARNING openstack [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1092.665110] env[65503]: WARNING openstack [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1092.760443] env[65503]: DEBUG nova.network.neutron [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updating instance_info_cache with network_info: [{"id": "96370c0c-da2e-4229-82a1-a24b799e6402", "address": "fa:16:3e:9a:2f:df", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96370c0c-da", "ovs_interfaceid": "96370c0c-da2e-4229-82a1-a24b799e6402", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1092.764493] env[65503]: DEBUG nova.network.neutron [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updated VIF entry in instance network info cache for port b5a654cb-c44f-45fc-bf0c-429ff06916f0. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1092.765294] env[65503]: DEBUG nova.network.neutron [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updating instance_info_cache with network_info: [{"id": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "address": "fa:16:3e:2b:7c:50", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a654cb-c4", "ovs_interfaceid": "b5a654cb-c44f-45fc-bf0c-429ff06916f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1092.845825] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450679, 'name': ReconfigVM_Task, 'duration_secs': 0.764614} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.846228] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Reconfigured VM instance instance-00000067 to attach disk [datastore2] bc700871-233c-4aa0-867e-4f166b6f54d1/bc700871-233c-4aa0-867e-4f166b6f54d1.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.846945] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a15f145-cb85-4dfd-89fb-1adde13f5870 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.854239] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1092.854239] env[65503]: value = "task-4450680" [ 1092.854239] env[65503]: _type = "Task" [ 1092.854239] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.864238] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450680, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.116915] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.117127] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.117338] env[65503]: DEBUG nova.network.neutron [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1093.179038] env[65503]: DEBUG nova.compute.manager [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Received event network-vif-plugged-96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1093.179286] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Acquiring lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.179521] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.179874] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.179874] env[65503]: DEBUG nova.compute.manager [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] No waiting events found dispatching network-vif-plugged-96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1093.180097] env[65503]: WARNING nova.compute.manager [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Received unexpected event network-vif-plugged-96370c0c-da2e-4229-82a1-a24b799e6402 for instance with vm_state shelved_offloaded and task_state spawning. [ 1093.180214] env[65503]: DEBUG nova.compute.manager [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Received event network-changed-96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1093.180366] env[65503]: DEBUG nova.compute.manager [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Refreshing instance network info cache due to event network-changed-96370c0c-da2e-4229-82a1-a24b799e6402. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1093.180489] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Acquiring lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.263165] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.266177] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Acquired lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.266285] env[65503]: DEBUG nova.network.neutron [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Refreshing network info cache for port 96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1093.268185] env[65503]: DEBUG oslo_concurrency.lockutils [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Releasing lock "refresh_cache-1d1a96cc-63b3-472c-b94a-1ea00763f770" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.268406] env[65503]: DEBUG nova.compute.manager [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Received event network-changed-aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1093.268589] env[65503]: DEBUG nova.compute.manager [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Refreshing instance network info cache due to event network-changed-aba660fb-0b53-43b7-a795-9ebaa9dd3097. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1093.268785] env[65503]: DEBUG oslo_concurrency.lockutils [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Acquiring lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.366719] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450680, 'name': Rename_Task, 'duration_secs': 0.194023} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.367079] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1093.367399] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f0ce662-37ec-472d-9402-4a8ef3c9fc97 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.377308] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1093.377308] env[65503]: value = "task-4450681" [ 1093.377308] env[65503]: _type = "Task" [ 1093.377308] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.390606] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450681, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.443259] env[65503]: DEBUG nova.virt.hardware [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='3bc66a17d94d06f1182f2bc2dde4209d',container_format='bare',created_at=2025-11-14T15:55:25Z,direct_url=,disk_format='vmdk',id=4fad5934-e42b-4e2d-849e-59f48c65fe4b,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-344195951-shelved',owner='592efb180976432cbcecb9ad421e1bd1',properties=ImageMetaProps,protected=,size=31660544,status='active',tags=,updated_at=2025-11-14T15:55:39Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1093.443850] env[65503]: DEBUG nova.virt.hardware [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1093.443887] env[65503]: DEBUG nova.virt.hardware [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1093.446532] env[65503]: DEBUG nova.virt.hardware [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Flavor pref 0:0:0 {{(pid=65503) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1093.446692] env[65503]: DEBUG nova.virt.hardware [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1093.446872] env[65503]: DEBUG nova.virt.hardware [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1093.447125] env[65503]: DEBUG nova.virt.hardware [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1093.447279] env[65503]: DEBUG nova.virt.hardware [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1093.447497] env[65503]: DEBUG nova.virt.hardware [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1093.447741] env[65503]: DEBUG nova.virt.hardware [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1093.447808] env[65503]: DEBUG nova.virt.hardware [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1093.449503] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b18934-d537-4c3e-911f-f937b46f63d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.458931] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03f8ad4-4476-4c16-8c80-b8772e318f2a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.475707] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:2f:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96370c0c-da2e-4229-82a1-a24b799e6402', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1093.484654] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1093.484812] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1093.485267] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d237baa3-63a2-4131-bdbe-e1a24dfa7a87 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.510551] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1093.510551] env[65503]: value = "task-4450682" [ 1093.510551] env[65503]: _type = "Task" [ 1093.510551] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.521740] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450682, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.620485] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1093.621136] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1093.690775] env[65503]: WARNING nova.network.neutron [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] d2c28c8e-55de-416b-97e1-c5ea06e7f107 already exists in list: networks containing: ['d2c28c8e-55de-416b-97e1-c5ea06e7f107']. 
ignoring it [ 1093.733057] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1093.733057] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1093.771290] env[65503]: WARNING neutronclient.v2_0.client [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1093.771831] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1093.772527] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1093.819131] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.820907] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.889539] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450681, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.890811] env[65503]: WARNING neutronclient.v2_0.client [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1093.892239] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1093.892239] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1093.909376] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "eb18fc5c-168b-4442-af66-15e255ecc535" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.909612] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "eb18fc5c-168b-4442-af66-15e255ecc535" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.909781] env[65503]: DEBUG nova.compute.manager [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1093.910719] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa60077-cdc0-4b73-aa48-574ebe8022cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.918920] env[65503]: DEBUG nova.compute.manager [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 1093.920282] env[65503]: DEBUG nova.objects.instance [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 
tempest-ServerActionsTestOtherA-1976804656-project-member] Lazy-loading 'flavor' on Instance uuid eb18fc5c-168b-4442-af66-15e255ecc535 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.944918] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1093.947511] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1094.003752] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1094.004236] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1094.012263] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "f5319f15-16eb-468a-a70e-7226963ed219" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.012607] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "f5319f15-16eb-468a-a70e-7226963ed219" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.012898] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "f5319f15-16eb-468a-a70e-7226963ed219-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.013150] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock 
"f5319f15-16eb-468a-a70e-7226963ed219-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.013396] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "f5319f15-16eb-468a-a70e-7226963ed219-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.025321] env[65503]: INFO nova.compute.manager [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Terminating instance [ 1094.033252] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450682, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.085221] env[65503]: WARNING neutronclient.v2_0.client [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1094.088017] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1094.088017] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1094.114998] env[65503]: WARNING neutronclient.v2_0.client [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1094.115526] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1094.115930] env[65503]: WARNING openstack [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1094.203290] env[65503]: DEBUG nova.network.neutron [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updated VIF entry in instance network info cache for port 96370c0c-da2e-4229-82a1-a24b799e6402. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1094.203850] env[65503]: DEBUG nova.network.neutron [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updating instance_info_cache with network_info: [{"id": "96370c0c-da2e-4229-82a1-a24b799e6402", "address": "fa:16:3e:9a:2f:df", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96370c0c-da", "ovs_interfaceid": "96370c0c-da2e-4229-82a1-a24b799e6402", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1094.246875] env[65503]: DEBUG nova.network.neutron [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updating instance_info_cache with network_info: [{"id": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "address": "fa:16:3e:b5:17:21", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaba660fb-0b", "ovs_interfaceid": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "884998aa-5fa4-410b-87e8-843ac2653ab7", "address": "fa:16:3e:e7:92:31", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap884998aa-5f", "ovs_interfaceid": "884998aa-5fa4-410b-87e8-843ac2653ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1094.323070] env[65503]: INFO nova.compute.manager [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Detaching volume 48fef821-c720-4862-96cf-43e3f851654f [ 1094.393550] env[65503]: DEBUG oslo_vmware.api [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450681, 'name': PowerOnVM_Task, 'duration_secs': 0.750451} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.393550] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1094.393550] env[65503]: INFO nova.compute.manager [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Took 9.15 seconds to spawn the instance on the hypervisor. [ 1094.393550] env[65503]: DEBUG nova.compute.manager [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1094.395136] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074c8593-e465-4e60-a22a-d7cde7a2daed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.409613] env[65503]: INFO nova.virt.block_device [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Attempting to driver detach volume 48fef821-c720-4862-96cf-43e3f851654f from mountpoint /dev/sdc [ 1094.409613] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Volume detach. 
Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1094.409613] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870469', 'volume_id': '48fef821-c720-4862-96cf-43e3f851654f', 'name': 'volume-48fef821-c720-4862-96cf-43e3f851654f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f89ca00e-d54e-4040-bf18-9a5ec96378d5', 'attached_at': '', 'detached_at': '', 'volume_id': '48fef821-c720-4862-96cf-43e3f851654f', 'serial': '48fef821-c720-4862-96cf-43e3f851654f'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1094.409613] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28a7345-64ca-461a-a64a-5587397ffcb6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.442851] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea772216-e4d2-454a-801e-ff5f0eec775c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.462412] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cda1048-e359-4f52-8474-5638d31f202b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.477830] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be3ef1f1-4e3a-4992-9932-8ff3684ccefe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.495745] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] The volume has not been displaced from its original location: [datastore2] volume-48fef821-c720-4862-96cf-43e3f851654f/volume-48fef821-c720-4862-96cf-43e3f851654f.vmdk. No consolidation needed. 
{{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1094.501011] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Reconfiguring VM instance instance-0000005b to detach disk 2002 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1094.502322] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e383d833-72ff-4475-87c2-87b8742a10c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.522399] env[65503]: DEBUG oslo_vmware.api [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1094.522399] env[65503]: value = "task-4450684" [ 1094.522399] env[65503]: _type = "Task" [ 1094.522399] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.525988] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450682, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.529682] env[65503]: DEBUG nova.compute.manager [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1094.529902] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1094.530649] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5371f7-d8b4-4cd5-a5e7-bdefdb116f9b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.541799] env[65503]: DEBUG oslo_vmware.api [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450684, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.544198] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1094.544481] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-143c629c-c883-4618-bfa2-6aaaf3eeb566 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.552480] env[65503]: DEBUG oslo_vmware.api [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1094.552480] env[65503]: value = "task-4450685" [ 1094.552480] env[65503]: _type = "Task" [ 1094.552480] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.564433] env[65503]: DEBUG oslo_vmware.api [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450685, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.709518] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Releasing lock "refresh_cache-b00a98a4-4865-4a02-a353-3d1da9ef0e51" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.709807] env[65503]: DEBUG nova.compute.manager [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Received event network-vif-plugged-884998aa-5fa4-410b-87e8-843ac2653ab7 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1094.710019] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Acquiring lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.710287] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.711086] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.711086] env[65503]: DEBUG nova.compute.manager [req-0135b7bc-5fc1-425b-9850-90c54a430666 
req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] No waiting events found dispatching network-vif-plugged-884998aa-5fa4-410b-87e8-843ac2653ab7 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1094.711086] env[65503]: WARNING nova.compute.manager [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Received unexpected event network-vif-plugged-884998aa-5fa4-410b-87e8-843ac2653ab7 for instance with vm_state active and task_state None. [ 1094.711086] env[65503]: DEBUG nova.compute.manager [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Received event network-changed-884998aa-5fa4-410b-87e8-843ac2653ab7 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1094.711386] env[65503]: DEBUG nova.compute.manager [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Refreshing instance network info cache due to event network-changed-884998aa-5fa4-410b-87e8-843ac2653ab7. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1094.711386] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Acquiring lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.749565] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.750229] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.750405] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.751170] env[65503]: DEBUG oslo_concurrency.lockutils [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Acquired lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.751170] env[65503]: DEBUG nova.network.neutron [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Refreshing network info cache for port aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1094.753609] env[65503]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a64986b-0166-43ba-9037-10bcefbe4318 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.772093] env[65503]: DEBUG nova.virt.hardware [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1094.772409] env[65503]: DEBUG nova.virt.hardware [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1094.772562] env[65503]: DEBUG nova.virt.hardware [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1094.772743] env[65503]: DEBUG nova.virt.hardware [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1094.772908] env[65503]: DEBUG nova.virt.hardware [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1094.773134] env[65503]: DEBUG nova.virt.hardware [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1094.773403] env[65503]: DEBUG nova.virt.hardware [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1094.773593] env[65503]: DEBUG nova.virt.hardware [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1094.773821] env[65503]: DEBUG nova.virt.hardware [None 
req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1094.773996] env[65503]: DEBUG nova.virt.hardware [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1094.774184] env[65503]: DEBUG nova.virt.hardware [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1094.781761] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Reconfiguring VM to attach interface {{(pid=65503) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1094.782971] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d03b614a-b6a4-4806-9a28-91201706a1c1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.804341] env[65503]: DEBUG oslo_vmware.api [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1094.804341] env[65503]: value = "task-4450687" [ 1094.804341] env[65503]: _type = "Task" [ 1094.804341] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.815091] env[65503]: DEBUG oslo_vmware.api [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450687, 'name': ReconfigVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.924124] env[65503]: INFO nova.compute.manager [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Took 14.82 seconds to build instance. 
[ 1094.949016] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1094.951753] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-266972de-caa6-4635-b1fc-1633c4040697 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.958843] env[65503]: DEBUG oslo_vmware.api [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1094.958843] env[65503]: value = "task-4450690" [ 1094.958843] env[65503]: _type = "Task" [ 1094.958843] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.971768] env[65503]: DEBUG oslo_vmware.api [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450690, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.030914] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450682, 'name': CreateVM_Task, 'duration_secs': 1.411855} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.035186] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1095.035852] env[65503]: WARNING neutronclient.v2_0.client [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1095.036347] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.036580] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.037041] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1095.038355] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3de7cc51-477e-434d-bad3-ad4501318708 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.044282] env[65503]: DEBUG oslo_vmware.api [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450684, 'name': ReconfigVM_Task, 'duration_secs': 0.315882} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.046423] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Reconfigured VM instance instance-0000005b to detach disk 2002 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1095.054025] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1095.054025] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520657f5-682e-0222-ec5b-be9518ae5b18" [ 1095.054025] env[65503]: _type = "Task" [ 1095.054025] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.054557] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ca5daea-3342-4c2a-9e30-6969dbadd4e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.081465] env[65503]: DEBUG oslo_vmware.api [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450685, 'name': PowerOffVM_Task, 'duration_secs': 0.225748} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.088763] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1095.088912] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1095.093063] env[65503]: DEBUG oslo_vmware.api [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1095.093063] env[65503]: value = "task-4450691" [ 1095.093063] env[65503]: _type = "Task" [ 1095.093063] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.093063] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d47df51-b00d-40fd-a119-0aff2f3221f1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.095345] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.095569] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.097413] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.097540] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Processing image 4fad5934-e42b-4e2d-849e-59f48c65fe4b {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1095.097750] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b/4fad5934-e42b-4e2d-849e-59f48c65fe4b.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.098043] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b/4fad5934-e42b-4e2d-849e-59f48c65fe4b.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.098159] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1095.098914] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6084c20-f7d1-4dc7-9276-c1393b122426 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.116762] env[65503]: DEBUG oslo_vmware.api [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450691, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.131816] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1095.132229] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1095.134198] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7031631-8ea5-4519-80a3-fc6f9165cd23 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.148947] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1095.148947] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5239c93a-177f-7fb3-e6ca-f0f143ab9172" [ 1095.148947] env[65503]: _type = "Task" [ 1095.148947] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.162847] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5239c93a-177f-7fb3-e6ca-f0f143ab9172, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.198215] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1095.198404] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1095.198702] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleting the datastore file [datastore2] f5319f15-16eb-468a-a70e-7226963ed219 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1095.199133] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9327287b-5a08-4867-b404-8c280a806ea6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.208603] env[65503]: DEBUG oslo_vmware.api [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1095.208603] env[65503]: value = "task-4450693" [ 1095.208603] env[65503]: _type = "Task" [ 1095.208603] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.219034] env[65503]: DEBUG oslo_vmware.api [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450693, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.258203] env[65503]: WARNING neutronclient.v2_0.client [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1095.259079] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1095.259601] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1095.318149] env[65503]: DEBUG oslo_vmware.api [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450687, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.427339] env[65503]: DEBUG oslo_concurrency.lockutils [None req-16d2fe45-1089-47fc-a8d0-5318264a3a4d tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.346s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.429889] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1095.430638] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1095.471555] env[65503]: DEBUG oslo_vmware.api [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450690, 'name': PowerOffVM_Task, 'duration_secs': 0.288941} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.478574] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1095.478803] env[65503]: DEBUG nova.compute.manager [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1095.479948] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcd1d0f-0b2c-4d04-91e2-803aa77cb4d6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.518643] env[65503]: WARNING neutronclient.v2_0.client [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1095.520125] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1095.520510] env[65503]: WARNING openstack [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1095.600182] env[65503]: DEBUG nova.compute.manager [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1095.616465] env[65503]: DEBUG oslo_vmware.api [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450691, 'name': ReconfigVM_Task, 'duration_secs': 0.2425} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.617017] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870469', 'volume_id': '48fef821-c720-4862-96cf-43e3f851654f', 'name': 'volume-48fef821-c720-4862-96cf-43e3f851654f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f89ca00e-d54e-4040-bf18-9a5ec96378d5', 'attached_at': '', 'detached_at': '', 'volume_id': '48fef821-c720-4862-96cf-43e3f851654f', 'serial': '48fef821-c720-4862-96cf-43e3f851654f'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1095.636916] env[65503]: DEBUG nova.network.neutron [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updated VIF entry in instance network info cache for port aba660fb-0b53-43b7-a795-9ebaa9dd3097. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1095.637678] env[65503]: DEBUG nova.network.neutron [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updating instance_info_cache with network_info: [{"id": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "address": "fa:16:3e:b5:17:21", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaba660fb-0b", "ovs_interfaceid": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "884998aa-5fa4-410b-87e8-843ac2653ab7", "address": "fa:16:3e:e7:92:31", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap884998aa-5f", "ovs_interfaceid": "884998aa-5fa4-410b-87e8-843ac2653ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1095.662341] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Preparing fetch location {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1095.662587] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Fetch image to [datastore1] OSTACK_IMG_6fd5a28f-486a-476b-bf90-ff618e9e56b4/OSTACK_IMG_6fd5a28f-486a-476b-bf90-ff618e9e56b4.vmdk {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1095.662766] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Downloading stream optimized image 4fad5934-e42b-4e2d-849e-59f48c65fe4b to [datastore1] OSTACK_IMG_6fd5a28f-486a-476b-bf90-ff618e9e56b4/OSTACK_IMG_6fd5a28f-486a-476b-bf90-ff618e9e56b4.vmdk on the data store datastore1 as vApp {{(pid=65503) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1095.662930] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Downloading image file data 4fad5934-e42b-4e2d-849e-59f48c65fe4b to the ESX as VM named 'OSTACK_IMG_6fd5a28f-486a-476b-bf90-ff618e9e56b4' {{(pid=65503) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1095.723079] env[65503]: DEBUG oslo_vmware.api [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450693, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228257} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.723755] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1095.723755] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1095.723755] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1095.723918] env[65503]: INFO nova.compute.manager [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1095.724785] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1095.724785] env[65503]: DEBUG nova.compute.manager [-] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1095.724785] env[65503]: DEBUG nova.network.neutron [-] [instance: f5319f15-16eb-468a-a70e-7226963ed219] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1095.724785] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1095.725276] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1095.725526] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1095.760019] env[65503]: DEBUG oslo_vmware.rw_handles [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1095.760019] env[65503]: value = "resgroup-9" [ 1095.760019] env[65503]: _type = "ResourcePool" [ 1095.760019] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1095.760019] env[65503]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d8e21bc0-e3e9-4eca-a559-0af10f7a981c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.777469] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1095.787605] env[65503]: DEBUG oslo_vmware.rw_handles [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lease: (returnval){ [ 1095.787605] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba6515-0607-8a0b-b23c-c2ebba51d7fe" [ 1095.787605] env[65503]: _type = "HttpNfcLease" [ 1095.787605] env[65503]: } obtained for vApp import into resource pool (val){ [ 1095.787605] env[65503]: value = "resgroup-9" [ 1095.787605] env[65503]: _type = "ResourcePool" [ 1095.787605] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1095.788055] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the lease: (returnval){ [ 1095.788055] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba6515-0607-8a0b-b23c-c2ebba51d7fe" [ 1095.788055] env[65503]: _type = "HttpNfcLease" [ 1095.788055] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1095.795611] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1095.795611] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba6515-0607-8a0b-b23c-c2ebba51d7fe" [ 1095.795611] env[65503]: _type = "HttpNfcLease" [ 1095.795611] env[65503]: } is initializing. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1095.816189] env[65503]: DEBUG oslo_vmware.api [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450687, 'name': ReconfigVM_Task, 'duration_secs': 0.83513} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.816669] env[65503]: WARNING neutronclient.v2_0.client [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1095.816891] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.817121] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Reconfigured VM to attach interface {{(pid=65503) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1095.843133] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1095.843415] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870473', 'volume_id': '1c9d3e4e-c23c-460d-827c-3a19ff329147', 'name': 'volume-1c9d3e4e-c23c-460d-827c-3a19ff329147', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b7f55645-f152-4fc9-a962-393f9a9b9c55', 'attached_at': '', 'detached_at': '', 'volume_id': '1c9d3e4e-c23c-460d-827c-3a19ff329147', 'serial': '1c9d3e4e-c23c-460d-827c-3a19ff329147'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1095.844358] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7dc62d-1f0d-4521-8722-6f6539371678 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.865125] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55283641-04a9-4bce-a56d-e671d6ea5bdd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.897107] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] volume-1c9d3e4e-c23c-460d-827c-3a19ff329147/volume-1c9d3e4e-c23c-460d-827c-3a19ff329147.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1095.897578] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2107d2b7-b5e7-467f-be68-df40a86e4a4f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.918480] env[65503]: DEBUG oslo_vmware.api [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1095.918480] env[65503]: value = "task-4450695" [ 1095.918480] env[65503]: _type = "Task" [ 1095.918480] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.929198] env[65503]: DEBUG oslo_vmware.api [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450695, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.993404] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac6659d7-9101-403c-8be2-8a52f86f4c31 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "eb18fc5c-168b-4442-af66-15e255ecc535" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.083s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.133296] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.133784] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.136586] env[65503]: INFO nova.compute.claims [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1096.142176] env[65503]: DEBUG oslo_concurrency.lockutils [req-7d9db2fe-fb2f-460a-8cf6-f2927a4ae928 req-7e67ed84-b31c-43b8-af00-b336269045b8 service nova] Releasing lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.142802] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Acquired lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.143322] env[65503]: DEBUG nova.network.neutron [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Refreshing network info cache for port 884998aa-5fa4-410b-87e8-843ac2653ab7 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1096.171852] env[65503]: DEBUG nova.objects.instance [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'flavor' on Instance uuid f89ca00e-d54e-4040-bf18-9a5ec96378d5 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.301787] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1096.301787] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba6515-0607-8a0b-b23c-c2ebba51d7fe" [ 1096.301787] env[65503]: _type = "HttpNfcLease" [ 1096.301787] env[65503]: } is initializing. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1096.322438] env[65503]: DEBUG oslo_concurrency.lockutils [None req-21fa6803-3715-415c-8710-00e1b23bdbe3 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-f71dca10-3b68-4f1e-868e-a8c8271f7c88-884998aa-5fa4-410b-87e8-843ac2653ab7" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.146s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.434457] env[65503]: DEBUG oslo_vmware.api [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450695, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.532059] env[65503]: DEBUG nova.network.neutron [-] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1096.600131] env[65503]: DEBUG nova.compute.manager [req-ee472b3c-d2c1-46ac-84a0-db3c1af27d78 req-28630195-8ead-4f87-a59f-8dab31b7df02 service nova] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Received event network-vif-deleted-48c4b867-0208-4634-9669-18a33e2018c7 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1096.648177] env[65503]: WARNING neutronclient.v2_0.client [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1096.648888] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1096.649262] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1096.768628] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1096.768994] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1096.801166] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1096.801166] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba6515-0607-8a0b-b23c-c2ebba51d7fe" [ 1096.801166] env[65503]: _type = "HttpNfcLease" [ 1096.801166] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1096.801545] env[65503]: DEBUG oslo_vmware.rw_handles [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1096.801545] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba6515-0607-8a0b-b23c-c2ebba51d7fe" [ 1096.801545] env[65503]: _type = "HttpNfcLease" [ 1096.801545] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1096.802514] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc1702e-9a1f-44e4-a8b6-04c508fee772 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.816343] env[65503]: DEBUG oslo_vmware.rw_handles [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b3581-b9d0-45e1-d417-e37d7d780250/disk-0.vmdk from lease info. 
{{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1096.816542] env[65503]: DEBUG oslo_vmware.rw_handles [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating HTTP connection to write to file with size = 31660544 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b3581-b9d0-45e1-d417-e37d7d780250/disk-0.vmdk. {{(pid=65503) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1096.874243] env[65503]: WARNING neutronclient.v2_0.client [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1096.875071] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1096.875299] env[65503]: WARNING openstack [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1096.888736] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8b3ffc7c-b747-4e9c-81cb-24db4df31f9b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.931716] env[65503]: DEBUG oslo_vmware.api [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450695, 'name': ReconfigVM_Task, 'duration_secs': 0.653575} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.932083] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Reconfigured VM instance instance-00000062 to attach disk [datastore1] volume-1c9d3e4e-c23c-460d-827c-3a19ff329147/volume-1c9d3e4e-c23c-460d-827c-3a19ff329147.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1096.941513] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2bdd249-473c-4ae3-9b89-ff3bf586e4d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.958896] env[65503]: DEBUG oslo_vmware.api [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1096.958896] env[65503]: value = "task-4450696" [ 1096.958896] env[65503]: _type = "Task" [ 1096.958896] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.968897] env[65503]: DEBUG oslo_vmware.api [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450696, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.992172] env[65503]: DEBUG nova.network.neutron [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updated VIF entry in instance network info cache for port 884998aa-5fa4-410b-87e8-843ac2653ab7. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1096.992172] env[65503]: DEBUG nova.network.neutron [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updating instance_info_cache with network_info: [{"id": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "address": "fa:16:3e:b5:17:21", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaba660fb-0b", "ovs_interfaceid": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "884998aa-5fa4-410b-87e8-843ac2653ab7", "address": "fa:16:3e:e7:92:31", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap884998aa-5f", "ovs_interfaceid": "884998aa-5fa4-410b-87e8-843ac2653ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1097.037780] env[65503]: INFO nova.compute.manager [-] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Took 1.31 seconds to deallocate network for instance. 
[ 1097.183337] env[65503]: DEBUG oslo_concurrency.lockutils [None req-39041646-3e82-4af1-a3b9-240511c5b3c1 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.362s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.297499] env[65503]: INFO nova.compute.manager [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Rebuilding instance [ 1097.377168] env[65503]: DEBUG nova.compute.manager [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1097.378552] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3457a6f-8a06-4db6-90bc-78a49d76f1d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.428892] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625dde3c-eb1b-4652-af32-9d039fed9fbe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.439268] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958556a4-64ea-4d51-8bb8-8552eee25bb8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.483785] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef1cb42-cc5b-4c2f-94f6-0bb5c91ad739 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.498198] env[65503]: DEBUG oslo_vmware.api [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450696, 'name': ReconfigVM_Task, 'duration_secs': 0.17837} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.498447] env[65503]: DEBUG oslo_concurrency.lockutils [req-0135b7bc-5fc1-425b-9850-90c54a430666 req-d4cff89a-3c0b-4d64-848f-5002cd0446f9 service nova] Releasing lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.499257] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870473', 'volume_id': '1c9d3e4e-c23c-460d-827c-3a19ff329147', 'name': 'volume-1c9d3e4e-c23c-460d-827c-3a19ff329147', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b7f55645-f152-4fc9-a962-393f9a9b9c55', 'attached_at': '', 'detached_at': '', 'volume_id': '1c9d3e4e-c23c-460d-827c-3a19ff329147', 'serial': '1c9d3e4e-c23c-460d-827c-3a19ff329147'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1097.501935] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa5aa77-6051-4cf7-abed-8f3bce67daf3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.524381] env[65503]: DEBUG nova.compute.provider_tree [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.546256] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.623733] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.624176] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.624452] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.624718] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.625133] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.627974] env[65503]: INFO nova.compute.manager [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Terminating instance [ 1098.029577] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "interface-f71dca10-3b68-4f1e-868e-a8c8271f7c88-884998aa-5fa4-410b-87e8-843ac2653ab7" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.029577] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-f71dca10-3b68-4f1e-868e-a8c8271f7c88-884998aa-5fa4-410b-87e8-843ac2653ab7" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.031659] env[65503]: DEBUG nova.scheduler.client.report [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1098.135729] env[65503]: DEBUG nova.compute.manager [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1098.136025] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1098.137051] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26975340-cc34-445f-9774-e54a0cef2f61 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.146151] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1098.146432] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ecd7ed23-e846-4745-b047-e01647b528af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.153114] env[65503]: DEBUG oslo_vmware.api [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1098.153114] env[65503]: value = "task-4450698" [ 1098.153114] env[65503]: _type = "Task" [ 1098.153114] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.164014] env[65503]: DEBUG oslo_vmware.api [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450698, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.289811] env[65503]: DEBUG oslo_vmware.rw_handles [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Completed reading data from the image iterator. {{(pid=65503) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1098.290459] env[65503]: DEBUG oslo_vmware.rw_handles [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b3581-b9d0-45e1-d417-e37d7d780250/disk-0.vmdk. 
{{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1098.292426] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655aee5f-3ff2-49d0-855c-e9f9ec221181 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.302902] env[65503]: DEBUG oslo_vmware.rw_handles [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b3581-b9d0-45e1-d417-e37d7d780250/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1098.303156] env[65503]: DEBUG oslo_vmware.rw_handles [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b3581-b9d0-45e1-d417-e37d7d780250/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1098.303450] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-87420d04-9bfe-48a3-99f6-04af58009338 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.403817] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1098.404260] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-107c5ec2-dcb7-4862-9efc-7899cf5d9f9a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.411743] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1098.411743] env[65503]: value = "task-4450699" [ 1098.411743] env[65503]: _type = "Task" [ 1098.411743] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.420886] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450699, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.536216] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.536540] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.537313] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.404s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.537849] env[65503]: DEBUG nova.compute.manager [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1098.541426] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0631da02-f9d3-4639-9c93-1a788637d3c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.544202] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.998s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.544469] env[65503]: DEBUG nova.objects.instance [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lazy-loading 'resources' on Instance uuid f5319f15-16eb-468a-a70e-7226963ed219 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1098.547541] env[65503]: DEBUG nova.objects.instance [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lazy-loading 'flavor' on Instance uuid b7f55645-f152-4fc9-a962-393f9a9b9c55 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1098.564868] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d39e3d-2481-4e6b-8834-ee324c48b4ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.589132] env[65503]: WARNING neutronclient.v2_0.client [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in 
favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1098.594569] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Reconfiguring VM to detach interface {{(pid=65503) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1098.595312] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e975de90-ed9b-4d44-b4f0-cacbd21ebdbc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.615052] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1098.615052] env[65503]: value = "task-4450700" [ 1098.615052] env[65503]: _type = "Task" [ 1098.615052] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.624022] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.637875] env[65503]: DEBUG nova.compute.manager [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Received event network-changed-61496b79-7af0-4518-be7a-0b0c270e3eff {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1098.638146] env[65503]: DEBUG nova.compute.manager [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Refreshing instance network info cache due to event network-changed-61496b79-7af0-4518-be7a-0b0c270e3eff. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1098.638397] env[65503]: DEBUG oslo_concurrency.lockutils [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] Acquiring lock "refresh_cache-bc700871-233c-4aa0-867e-4f166b6f54d1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.638561] env[65503]: DEBUG oslo_concurrency.lockutils [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] Acquired lock "refresh_cache-bc700871-233c-4aa0-867e-4f166b6f54d1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.638747] env[65503]: DEBUG nova.network.neutron [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Refreshing network info cache for port 61496b79-7af0-4518-be7a-0b0c270e3eff {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1098.665896] env[65503]: DEBUG oslo_vmware.api [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450698, 'name': PowerOffVM_Task, 'duration_secs': 0.2818} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.666243] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1098.666477] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1098.666751] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f214cf42-a661-4765-868d-34f6dcd39e4e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.761405] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1098.761555] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1098.761738] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Deleting the datastore file [datastore2] f89ca00e-d54e-4040-bf18-9a5ec96378d5 {{(pid=65503) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1098.762058] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9cfe4d57-7f44-423c-84d5-8965acfacbb3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.770549] env[65503]: DEBUG oslo_vmware.api [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for the task: (returnval){ [ 1098.770549] env[65503]: value = "task-4450702" [ 1098.770549] env[65503]: _type = "Task" [ 1098.770549] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.779138] env[65503]: DEBUG oslo_vmware.api [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450702, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.923406] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1098.923697] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1098.924513] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a82fb18-b969-44d9-af31-69a73cf0b5b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.932102] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1098.932398] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0499b973-202e-4dc7-bd66-869227ea8629 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.030101] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1099.030328] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1099.030502] 
env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleting the datastore file [datastore2] eb18fc5c-168b-4442-af66-15e255ecc535 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1099.031099] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4cdf3c3c-fe18-4046-a6b7-592b2afa2204 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.041223] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1099.041223] env[65503]: value = "task-4450704" [ 1099.041223] env[65503]: _type = "Task" [ 1099.041223] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.048407] env[65503]: DEBUG nova.compute.utils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1099.052652] env[65503]: DEBUG oslo_vmware.rw_handles [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b3581-b9d0-45e1-d417-e37d7d780250/disk-0.vmdk. {{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1099.052876] env[65503]: INFO nova.virt.vmwareapi.images [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Downloaded image file data 4fad5934-e42b-4e2d-849e-59f48c65fe4b [ 1099.054037] env[65503]: DEBUG nova.compute.manager [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1099.054037] env[65503]: DEBUG nova.network.neutron [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1099.054368] env[65503]: WARNING neutronclient.v2_0.client [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1099.054608] env[65503]: WARNING neutronclient.v2_0.client [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1099.055324] env[65503]: WARNING openstack [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1099.055681] env[65503]: WARNING openstack [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1099.063737] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c215e726-bc36-446b-89e8-6ccad1c2e3bc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.072439] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450704, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.091819] env[65503]: DEBUG oslo_concurrency.lockutils [None req-62e07f54-8030-43fc-9318-b2b6762f75f4 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.887s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.093753] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d442ab59-80ef-4ff4-9de1-036c9bdd9442 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.130356] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.137165] env[65503]: DEBUG nova.policy [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e97b5208de384c19bbc0e332b67fc4ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c5b0c3771b5434992cd58e1af539bde', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1099.145284] env[65503]: WARNING neutronclient.v2_0.client [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1099.146124] env[65503]: WARNING openstack [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1099.146576] env[65503]: WARNING openstack [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1099.157075] env[65503]: INFO nova.virt.vmwareapi.images [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] The imported VM was unregistered [ 1099.159825] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Caching image {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1099.159953] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating directory with path [datastore1] devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.161186] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4e86324-869e-4664-a1e7-2371ec2fa8ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.181755] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Created directory with path [datastore1] 
devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.181976] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_6fd5a28f-486a-476b-bf90-ff618e9e56b4/OSTACK_IMG_6fd5a28f-486a-476b-bf90-ff618e9e56b4.vmdk to [datastore1] devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b/4fad5934-e42b-4e2d-849e-59f48c65fe4b.vmdk. {{(pid=65503) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1099.182281] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-fe944b41-2093-448e-bce9-94c7ef0a2554 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.194367] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1099.194367] env[65503]: value = "task-4450706" [ 1099.194367] env[65503]: _type = "Task" [ 1099.194367] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.205165] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450706, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.279029] env[65503]: WARNING openstack [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1099.279523] env[65503]: WARNING openstack [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1099.292931] env[65503]: DEBUG oslo_vmware.api [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450702, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.316453] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521547cc-8b35-42c3-8cee-c3f083d80e45 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.326117] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e03df7-a86c-48f4-9b73-f6eb30d44b23 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.362399] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20d9a51-1832-4155-8ea6-cccaf976b3fb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.371225] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2ec3be-b0ae-43af-84bb-412cad06e3ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.386451] env[65503]: DEBUG nova.compute.provider_tree [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.416605] env[65503]: WARNING neutronclient.v2_0.client [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1099.417425] env[65503]: WARNING openstack [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1099.417693] env[65503]: WARNING openstack [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1099.487346] env[65503]: DEBUG nova.network.neutron [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Successfully created port: d65d8d7d-0e52-489e-854b-0090c2083876 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1099.522325] env[65503]: DEBUG nova.network.neutron [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Updated VIF entry in instance network info cache for port 61496b79-7af0-4518-be7a-0b0c270e3eff. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1099.523129] env[65503]: DEBUG nova.network.neutron [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Updating instance_info_cache with network_info: [{"id": "61496b79-7af0-4518-be7a-0b0c270e3eff", "address": "fa:16:3e:5e:cf:59", "network": {"id": "5e066471-434b-4bf0-a5f6-d57f179d43c1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-548932524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34e8cd66745a40d2acebbce98050ee5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61496b79-7a", "ovs_interfaceid": "61496b79-7af0-4518-be7a-0b0c270e3eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1099.554277] env[65503]: DEBUG nova.compute.manager [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1099.557100] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450704, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.351333} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.558062] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1099.558149] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1099.558298] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1099.631329] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.707343] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450706, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.764670] env[65503]: INFO nova.compute.manager [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Rescuing [ 1099.765080] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.765363] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.765558] env[65503]: DEBUG nova.network.neutron [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1099.785243] env[65503]: DEBUG oslo_vmware.api [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Task: {'id': task-4450702, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.551615} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.785968] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1099.786182] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1099.786404] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1099.786564] env[65503]: INFO nova.compute.manager [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Took 1.65 seconds to destroy the instance on the hypervisor. 
[ 1099.786784] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1099.787041] env[65503]: DEBUG nova.compute.manager [-] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1099.787145] env[65503]: DEBUG nova.network.neutron [-] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1099.787422] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1099.788091] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1099.788410] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1099.830509] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1099.889703] env[65503]: DEBUG nova.scheduler.client.report [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1100.025579] env[65503]: DEBUG oslo_concurrency.lockutils [req-3f3b7d38-2faa-4e3e-9dae-71b0f4957380 req-c2299886-5b31-47b5-9318-161fff800e6f service nova] Releasing lock "refresh_cache-bc700871-233c-4aa0-867e-4f166b6f54d1" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.129919] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.208629] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450706, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.269619] env[65503]: WARNING neutronclient.v2_0.client [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1100.270512] env[65503]: WARNING openstack [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1100.270946] env[65503]: WARNING openstack [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1100.395248] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.851s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.420284] env[65503]: INFO nova.scheduler.client.report [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleted allocations for instance f5319f15-16eb-468a-a70e-7226963ed219 [ 1100.568422] env[65503]: DEBUG nova.compute.manager [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1100.605703] env[65503]: DEBUG nova.virt.hardware [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=<?>,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-11-14T15:45:13Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1100.606075] env[65503]: DEBUG nova.virt.hardware [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1100.606246] env[65503]: DEBUG nova.virt.hardware [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1100.606431] env[65503]: DEBUG nova.virt.hardware [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1100.606637] env[65503]: DEBUG nova.virt.hardware [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1100.606790] env[65503]: DEBUG nova.virt.hardware [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1100.607016] env[65503]: DEBUG nova.virt.hardware [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1100.607232] env[65503]: DEBUG nova.virt.hardware [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1100.607416] env[65503]: DEBUG nova.virt.hardware [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 
tempest-ImagesTestJSON-2086755152-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1100.607903] env[65503]: DEBUG nova.virt.hardware [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1100.607903] env[65503]: DEBUG nova.virt.hardware [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1100.610281] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c07834-fdd6-4734-8b9d-3afb673f1dcc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.629161] env[65503]: DEBUG nova.virt.hardware [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=<?>,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-11-14T15:45:13Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1100.629409] env[65503]: DEBUG nova.virt.hardware [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1100.629558] env[65503]: DEBUG nova.virt.hardware [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1100.629729] env[65503]: DEBUG nova.virt.hardware [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1100.629866] env[65503]: DEBUG nova.virt.hardware [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1100.630031] env[65503]: DEBUG nova.virt.hardware [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f 
tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1100.630298] env[65503]: DEBUG nova.virt.hardware [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1100.630481] env[65503]: DEBUG nova.virt.hardware [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1100.631041] env[65503]: DEBUG nova.virt.hardware [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1100.631943] env[65503]: DEBUG nova.virt.hardware [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1100.632233] env[65503]: DEBUG nova.virt.hardware [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1100.633251] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805e0e4a-edf2-4ab2-b21d-784278703809 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.637745] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66867e1a-b2ef-4216-bfe3-beb893e1eabe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.648030] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.663279] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67aeab92-996e-468c-82ef-255b2533bbdb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.680628] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:7a:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a7ba8d0-0208-4af7-af44-2a5ad382f9be', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1100.688315] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1100.688673] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1100.688936] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1a8fa16-5a30-4a73-8c8d-3a95712eeb17 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.716041] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450706, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.717415] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1100.717415] env[65503]: value = "task-4450708" [ 1100.717415] env[65503]: _type = "Task" [ 1100.717415] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.728724] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450708, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.930027] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ea1b0428-1bf4-4f42-8c85-d8a4ad017492 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "f5319f15-16eb-468a-a70e-7226963ed219" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.916s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.115308] env[65503]: DEBUG nova.network.neutron [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Successfully updated port: d65d8d7d-0e52-489e-854b-0090c2083876 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1101.130630] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.145144] env[65503]: WARNING openstack [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1101.145608] env[65503]: WARNING openstack [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1101.199292] env[65503]: DEBUG nova.compute.manager [req-c3c1b8bf-3898-4a09-924b-8f964641ff75 req-fa7c9d1d-509f-4f6d-8d2f-4c8fbbd02805 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Received event network-vif-deleted-308ddfd2-c7be-4ccb-afb0-0baeec362526 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1101.199292] env[65503]: INFO nova.compute.manager [req-c3c1b8bf-3898-4a09-924b-8f964641ff75 req-fa7c9d1d-509f-4f6d-8d2f-4c8fbbd02805 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Neutron deleted interface 308ddfd2-c7be-4ccb-afb0-0baeec362526; detaching it from the instance and deleting it from the info cache [ 1101.199292] env[65503]: DEBUG nova.network.neutron [req-c3c1b8bf-3898-4a09-924b-8f964641ff75 req-fa7c9d1d-509f-4f6d-8d2f-4c8fbbd02805 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1101.225574] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450706, 'name': MoveVirtualDisk_Task} progress is 
74%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.227540] env[65503]: DEBUG nova.compute.manager [req-6be83f99-4776-4768-9d5d-b44fe16a1b00 req-d540e6d4-5e77-48b8-af01-d24558b169e7 service nova] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Received event network-vif-plugged-d65d8d7d-0e52-489e-854b-0090c2083876 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1101.227956] env[65503]: DEBUG oslo_concurrency.lockutils [req-6be83f99-4776-4768-9d5d-b44fe16a1b00 req-d540e6d4-5e77-48b8-af01-d24558b169e7 service nova] Acquiring lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.228349] env[65503]: DEBUG oslo_concurrency.lockutils [req-6be83f99-4776-4768-9d5d-b44fe16a1b00 req-d540e6d4-5e77-48b8-af01-d24558b169e7 service nova] Lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.228859] env[65503]: DEBUG oslo_concurrency.lockutils [req-6be83f99-4776-4768-9d5d-b44fe16a1b00 req-d540e6d4-5e77-48b8-af01-d24558b169e7 service nova] Lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.228859] env[65503]: DEBUG nova.compute.manager [req-6be83f99-4776-4768-9d5d-b44fe16a1b00 req-d540e6d4-5e77-48b8-af01-d24558b169e7 service nova] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] No waiting events found dispatching network-vif-plugged-d65d8d7d-0e52-489e-854b-0090c2083876 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1101.229066] env[65503]: WARNING nova.compute.manager [req-6be83f99-4776-4768-9d5d-b44fe16a1b00 req-d540e6d4-5e77-48b8-af01-d24558b169e7 service nova] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Received unexpected event network-vif-plugged-d65d8d7d-0e52-489e-854b-0090c2083876 for instance with vm_state building and task_state spawning. [ 1101.230631] env[65503]: WARNING neutronclient.v2_0.client [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1101.231640] env[65503]: WARNING openstack [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1101.232615] env[65503]: WARNING openstack [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1101.251036] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450708, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.255631] env[65503]: DEBUG nova.network.neutron [-] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1101.333506] env[65503]: DEBUG nova.network.neutron [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updating instance_info_cache with network_info: [{"id": "7467737c-bad8-423a-85d9-f5870d27aebc", "address": "fa:16:3e:97:92:c0", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7467737c-ba", "ovs_interfaceid": "7467737c-bad8-423a-85d9-f5870d27aebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1101.622480] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "refresh_cache-dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.622938] env[65503]: DEBUG oslo_concurrency.lockutils [None 
req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "refresh_cache-dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.622938] env[65503]: DEBUG nova.network.neutron [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1101.634025] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.702431] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b326752b-72a9-4b54-ac7f-a623c131c3a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.717618] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450706, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.721803] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2e74ec-32de-49a1-88e0-0aff49b6d6db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.747882] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450708, 'name': CreateVM_Task, 'duration_secs': 0.554625} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.748118] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1101.748705] env[65503]: WARNING neutronclient.v2_0.client [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 1101.749102] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.749278] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.749672] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1101.761756] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4730b40-de1a-4d06-aeab-42b39b977efc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.764055] env[65503]: INFO nova.compute.manager [-] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Took 1.98 seconds to deallocate network for instance. [ 1101.764799] env[65503]: DEBUG nova.compute.manager [req-c3c1b8bf-3898-4a09-924b-8f964641ff75 req-fa7c9d1d-509f-4f6d-8d2f-4c8fbbd02805 service nova] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Detach interface failed, port_id=308ddfd2-c7be-4ccb-afb0-0baeec362526, reason: Instance f89ca00e-d54e-4040-bf18-9a5ec96378d5 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1101.772587] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1101.772587] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b60271-3054-e39f-5715-89f2d68c5335" [ 1101.772587] env[65503]: _type = "Task" [ 1101.772587] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.784051] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b60271-3054-e39f-5715-89f2d68c5335, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.836791] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.129557] env[65503]: WARNING openstack [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1102.129968] env[65503]: WARNING openstack [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1102.137417] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.176200] env[65503]: DEBUG nova.network.neutron [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1102.196983] env[65503]: WARNING openstack [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1102.197426] env[65503]: WARNING openstack [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1102.216938] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450706, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.879616} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.217281] env[65503]: INFO nova.virt.vmwareapi.ds_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_6fd5a28f-486a-476b-bf90-ff618e9e56b4/OSTACK_IMG_6fd5a28f-486a-476b-bf90-ff618e9e56b4.vmdk to [datastore1] devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b/4fad5934-e42b-4e2d-849e-59f48c65fe4b.vmdk. [ 1102.218043] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Cleaning up location [datastore1] OSTACK_IMG_6fd5a28f-486a-476b-bf90-ff618e9e56b4 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1102.218043] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_6fd5a28f-486a-476b-bf90-ff618e9e56b4 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1102.218043] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a7d3f19-469d-42bd-b0f2-9cd78ecda452 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.226288] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1102.226288] env[65503]: value = "task-4450710" [ 1102.226288] env[65503]: _type = "Task" [ 1102.226288] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.237081] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450710, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.272074] env[65503]: WARNING neutronclient.v2_0.client [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1102.273072] env[65503]: WARNING openstack [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1102.273585] env[65503]: WARNING openstack [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1102.288133] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.288133] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.288392] env[65503]: DEBUG nova.objects.instance [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lazy-loading 'resources' on Instance uuid f89ca00e-d54e-4040-bf18-9a5ec96378d5 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.311402] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b60271-3054-e39f-5715-89f2d68c5335, 'name': SearchDatastore_Task, 'duration_secs': 0.049445} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.312519] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.313161] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1102.313481] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.313632] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.313813] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1102.314418] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-492a5e59-df4b-44c5-95f1-4b3a3132cc41 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.325883] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1102.326277] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1102.326912] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d69504fb-6c33-4355-b9e0-1f7014a2a140 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.340568] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1102.340568] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529abcf1-0850-6195-99b9-9827fbaaa2b8" [ 1102.340568] env[65503]: _type = "Task" [ 1102.340568] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.351527] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529abcf1-0850-6195-99b9-9827fbaaa2b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.379969] env[65503]: DEBUG nova.network.neutron [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Updating instance_info_cache with network_info: [{"id": "d65d8d7d-0e52-489e-854b-0090c2083876", "address": "fa:16:3e:cd:a3:cd", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd65d8d7d-0e", "ovs_interfaceid": "d65d8d7d-0e52-489e-854b-0090c2083876", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1102.629352] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.737026] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450710, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103804} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.737276] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1102.737438] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b/4fad5934-e42b-4e2d-849e-59f48c65fe4b.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.737679] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b/4fad5934-e42b-4e2d-849e-59f48c65fe4b.vmdk to [datastore1] b00a98a4-4865-4a02-a353-3d1da9ef0e51/b00a98a4-4865-4a02-a353-3d1da9ef0e51.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1102.737938] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7fa1f30-19bc-4d2d-8a2e-10ac14153e5a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.746717] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1102.746717] env[65503]: value = "task-4450711" [ 1102.746717] env[65503]: _type = "Task" [ 1102.746717] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.754627] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450711, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.852744] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529abcf1-0850-6195-99b9-9827fbaaa2b8, 'name': SearchDatastore_Task, 'duration_secs': 0.023525} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.853594] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9caece25-1be8-4442-ae61-b4ee7afc2fe7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.861248] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1102.861248] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214df53-8304-a6d1-c356-47cebe3a7324" [ 1102.861248] env[65503]: _type = "Task" [ 1102.861248] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.869802] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214df53-8304-a6d1-c356-47cebe3a7324, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.884354] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "refresh_cache-dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.884648] env[65503]: DEBUG nova.compute.manager [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Instance network_info: |[{"id": "d65d8d7d-0e52-489e-854b-0090c2083876", "address": "fa:16:3e:cd:a3:cd", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd65d8d7d-0e", "ovs_interfaceid": "d65d8d7d-0e52-489e-854b-0090c2083876", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1102.885280] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:cd:a3:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd65d8d7d-0e52-489e-854b-0090c2083876', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1102.892664] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1102.893904] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1102.896556] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0813d73-c6a3-4fe8-9432-da6a70df66f4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.916994] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1102.916994] env[65503]: value = "task-4450712" [ 1102.916994] env[65503]: _type = "Task" [ 1102.916994] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.928454] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450712, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.981970] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5833568f-8de0-45bc-85d8-76a7b6f046be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.990223] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f6bdac-8713-4cf1-b9f9-32114c0ec16d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.022792] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e446f5d-fe63-4148-bde4-ce7c2967f9df {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.032013] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e3342f-5486-4937-83a4-c52d597d2a90 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.047469] env[65503]: DEBUG nova.compute.provider_tree [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.133363] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.254009] env[65503]: DEBUG nova.compute.manager [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Received event network-changed-d65d8d7d-0e52-489e-854b-0090c2083876 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1103.254381] env[65503]: DEBUG nova.compute.manager [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Refreshing instance network info cache due to event network-changed-d65d8d7d-0e52-489e-854b-0090c2083876. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1103.254624] env[65503]: DEBUG oslo_concurrency.lockutils [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] Acquiring lock "refresh_cache-dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.254766] env[65503]: DEBUG oslo_concurrency.lockutils [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] Acquired lock "refresh_cache-dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.254968] env[65503]: DEBUG nova.network.neutron [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Refreshing network info cache for port d65d8d7d-0e52-489e-854b-0090c2083876 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1103.260603] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450711, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.373530] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214df53-8304-a6d1-c356-47cebe3a7324, 'name': SearchDatastore_Task, 'duration_secs': 0.012289} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.373852] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.374132] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] eb18fc5c-168b-4442-af66-15e255ecc535/eb18fc5c-168b-4442-af66-15e255ecc535.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1103.374920] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e2dec8e-0774-459f-ba0a-98310d3c6712 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.382950] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1103.382950] env[65503]: value = "task-4450713" [ 1103.382950] env[65503]: _type = "Task" [ 1103.382950] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.392968] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450713, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.398033] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1103.398403] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72a9206e-c6d0-4226-9ef3-8edb96745091 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.406373] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1103.406373] env[65503]: value = "task-4450714" [ 1103.406373] env[65503]: _type = "Task" [ 1103.406373] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.416111] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450714, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.426724] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450712, 'name': CreateVM_Task, 'duration_secs': 0.344899} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.426940] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1103.427518] env[65503]: WARNING neutronclient.v2_0.client [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1103.427900] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.428062] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.428382] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1103.428657] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-789d90df-59aa-4cb2-9c16-27ef54601446 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.434215] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1103.434215] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba1195-a597-fc54-08bd-da1571e3d4f9" [ 1103.434215] env[65503]: _type = "Task" [ 1103.434215] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.444048] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba1195-a597-fc54-08bd-da1571e3d4f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.551047] env[65503]: DEBUG nova.scheduler.client.report [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.635243] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.764286] env[65503]: WARNING neutronclient.v2_0.client [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1103.765194] env[65503]: WARNING openstack [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1103.765730] env[65503]: WARNING openstack [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1103.774258] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450711, 'name': CopyVirtualDisk_Task} progress is 12%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.882688] env[65503]: WARNING openstack [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1103.883106] env[65503]: WARNING openstack [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1103.903608] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450713, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.920754] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450714, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.952987] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba1195-a597-fc54-08bd-da1571e3d4f9, 'name': SearchDatastore_Task, 'duration_secs': 0.010634} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.954858] env[65503]: WARNING neutronclient.v2_0.client [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
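The recurring "Disabling service 'block-storage'" / "Disabling service 'key-manager'" warnings from the 'openstack' (SDK) logger all report the same root cause: while processing per-service config, an option (valid_interfaces) is read that was never registered in the [cinder] or [barbican] group of the config object being inspected, and oslo.config raises NoSuchOptError. A self-contained sketch that reproduces just the exception class and message quoted in the warnings (the surrounding SDK plumbing is not shown in the log and is only assumed here):

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))  # group exists, option does not

try:
    _ = conf.cinder.valid_interfaces  # never registered for group [cinder]
except cfg.NoSuchOptError as exc:
    print(exc)  # "no such option valid_interfaces in group [cinder]"

The warning is non-fatal: the affected service entry is simply disabled, and the compute operations in the surrounding entries continue unaffected.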
[ 1103.956026] env[65503]: WARNING openstack [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1103.956604] env[65503]: WARNING openstack [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1103.969709] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.970131] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1103.970527] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.970753] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.971065] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1103.971843] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45597ff6-1350-4f55-8bef-dcd759dc76fa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.992666] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1103.992898] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 
tempest-ImagesTestJSON-2086755152-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1103.993718] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8485d97a-c7af-4fbf-9181-990847f19caf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.003310] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1104.003310] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]523714af-cc2b-9a7f-a558-e71f15afce2e" [ 1104.003310] env[65503]: _type = "Task" [ 1104.003310] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.019382] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523714af-cc2b-9a7f-a558-e71f15afce2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.057233] env[65503]: DEBUG nova.network.neutron [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Updated VIF entry in instance network info cache for port d65d8d7d-0e52-489e-854b-0090c2083876. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1104.057233] env[65503]: DEBUG nova.network.neutron [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Updating instance_info_cache with network_info: [{"id": "d65d8d7d-0e52-489e-854b-0090c2083876", "address": "fa:16:3e:cd:a3:cd", "network": {"id": "ba2358e2-86fb-4ece-a7ad-347d29c8ffdf", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1461352880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c5b0c3771b5434992cd58e1af539bde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd65d8d7d-0e", "ovs_interfaceid": "d65d8d7d-0e52-489e-854b-0090c2083876", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1104.059065] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.771s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.087119] env[65503]: INFO nova.scheduler.client.report [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Deleted allocations for instance f89ca00e-d54e-4040-bf18-9a5ec96378d5 [ 1104.136941] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task} progress is 18%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.263893] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450711, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.404245] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450713, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.797493} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.404563] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] eb18fc5c-168b-4442-af66-15e255ecc535/eb18fc5c-168b-4442-af66-15e255ecc535.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1104.404810] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1104.405201] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e6ad9c8-0d19-4caa-807b-9e810c0ccaa2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.422846] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450714, 'name': PowerOffVM_Task, 'duration_secs': 0.726992} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.425362] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1104.426747] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1104.426747] env[65503]: value = "task-4450716" [ 1104.426747] env[65503]: _type = "Task" [ 1104.426747] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.428157] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b072d93-c9b9-4b2e-ae9d-2b02475af7f3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.466620] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7546bf59-7cc5-4249-8024-571677e40ea4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.470028] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450716, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.512397] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.512780] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6a21fe7-a156-4a2d-85a9-76d885539866 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.528097] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523714af-cc2b-9a7f-a558-e71f15afce2e, 'name': SearchDatastore_Task, 'duration_secs': 0.103663} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.530606] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1104.530606] env[65503]: value = "task-4450717" [ 1104.530606] env[65503]: _type = "Task" [ 1104.530606] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.530965] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21d5386a-687f-438a-b620-18f5d2cb2308 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.552637] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1104.552637] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52918674-f465-17b3-24f2-98379872c76e" [ 1104.552637] env[65503]: _type = "Task" [ 1104.552637] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.553071] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1104.553457] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1104.553529] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.553642] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.553882] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1104.554265] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0bca7c90-3e8a-4850-8f64-266ab32e47a6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.563085] env[65503]: DEBUG oslo_concurrency.lockutils [req-1525426b-15cf-471c-adca-f88a3e4a6fc2 req-9123756b-464c-4387-9b0f-028a527d592d service nova] Releasing lock "refresh_cache-dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" {{(pid=65503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.570374] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52918674-f465-17b3-24f2-98379872c76e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.580759] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1104.581286] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1104.582013] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1edfdcb7-b590-4d8d-be1f-668265fa2ca7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.590793] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1104.590793] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529ec31f-a846-c410-a91f-31f0c5f75806" [ 1104.590793] env[65503]: _type = "Task" [ 1104.590793] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.598088] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98985e26-4f46-407d-94f1-9fac35a9f3c3 tempest-AttachVolumeTestJSON-1693657745 tempest-AttachVolumeTestJSON-1693657745-project-member] Lock "f89ca00e-d54e-4040-bf18-9a5ec96378d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.974s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.604703] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529ec31f-a846-c410-a91f-31f0c5f75806, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.635161] env[65503]: DEBUG oslo_vmware.api [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450700, 'name': ReconfigVM_Task, 'duration_secs': 5.90679} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.635636] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.635921] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Reconfigured VM to detach interface {{(pid=65503) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1104.636609] env[65503]: WARNING neutronclient.v2_0.client [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1104.636981] env[65503]: WARNING neutronclient.v2_0.client [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1104.637685] env[65503]: WARNING openstack [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1104.638080] env[65503]: WARNING openstack [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1104.688787] env[65503]: WARNING neutronclient.v2_0.client [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1104.764566] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450711, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.946996] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450716, 'name': ExtendVirtualDisk_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.074261] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52918674-f465-17b3-24f2-98379872c76e, 'name': SearchDatastore_Task, 'duration_secs': 0.048999} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.074637] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.074898] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f/dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1105.075255] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e48cdc0d-64d0-4964-a2bd-44fbf277ed0d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.086921] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1105.086921] env[65503]: value = "task-4450719" [ 1105.086921] env[65503]: _type = "Task" [ 1105.086921] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.104872] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450719, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.109816] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529ec31f-a846-c410-a91f-31f0c5f75806, 'name': SearchDatastore_Task, 'duration_secs': 0.093459} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.110736] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14eb7dc3-6c71-4f2a-abb6-1b53c67f169b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.121119] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1105.121119] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528ade6f-e35c-07b6-a91e-e10dad5f8107" [ 1105.121119] env[65503]: _type = "Task" [ 1105.121119] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.132956] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528ade6f-e35c-07b6-a91e-e10dad5f8107, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.261449] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450711, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.446975] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450716, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.53143} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.447655] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1105.448888] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cd214e-d692-45f0-a056-d05397066ed0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.478276] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] eb18fc5c-168b-4442-af66-15e255ecc535/eb18fc5c-168b-4442-af66-15e255ecc535.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.478483] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bc10176-2568-4819-ac82-85100817d7b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.503664] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1105.503664] env[65503]: value = "task-4450720" [ 1105.503664] env[65503]: _type = "Task" [ 1105.503664] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.515293] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450720, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.606337] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450719, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.636125] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.636516] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.636716] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.636894] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.637080] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.638795] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528ade6f-e35c-07b6-a91e-e10dad5f8107, 'name': SearchDatastore_Task, 'duration_secs': 0.089975} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.639606] env[65503]: INFO nova.compute.manager [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Terminating instance [ 1105.641094] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.641314] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] b7f55645-f152-4fc9-a962-393f9a9b9c55/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. {{(pid=65503) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1105.642625] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-820b06a4-d24e-43c0-adb7-b9635a204384 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.654242] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1105.654242] env[65503]: value = "task-4450721" [ 1105.654242] env[65503]: _type = "Task" [ 1105.654242] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.667143] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450721, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.763808] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450711, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.017226] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450720, 'name': ReconfigVM_Task, 'duration_secs': 0.439144} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.017530] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Reconfigured VM instance instance-00000066 to attach disk [datastore2] eb18fc5c-168b-4442-af66-15e255ecc535/eb18fc5c-168b-4442-af66-15e255ecc535.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.018265] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7660d4e-df4b-4890-9950-2657e8dde4a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.028608] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1106.028608] env[65503]: value = "task-4450722" [ 1106.028608] env[65503]: _type = "Task" [ 1106.028608] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.040136] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450722, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.101586] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450719, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.108635] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.109051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquired lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.109527] env[65503]: DEBUG nova.network.neutron [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1106.146398] env[65503]: DEBUG nova.compute.manager [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1106.146703] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1106.148505] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112feac3-30e0-4eb4-bb27-329fe3777ad7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.162422] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1106.163652] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2cf7a2dd-7f71-411f-80b5-0a8feaac250a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.171195] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450721, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.178796] env[65503]: DEBUG oslo_vmware.api [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1106.178796] env[65503]: value = "task-4450724" [ 1106.178796] env[65503]: _type = "Task" [ 1106.178796] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.191030] env[65503]: DEBUG oslo_vmware.api [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450724, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.267738] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450711, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.351437} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.268029] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4fad5934-e42b-4e2d-849e-59f48c65fe4b/4fad5934-e42b-4e2d-849e-59f48c65fe4b.vmdk to [datastore1] b00a98a4-4865-4a02-a353-3d1da9ef0e51/b00a98a4-4865-4a02-a353-3d1da9ef0e51.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1106.268955] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f31de1-3ef1-4034-94d9-6b9e9fef499c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.304463] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] b00a98a4-4865-4a02-a353-3d1da9ef0e51/b00a98a4-4865-4a02-a353-3d1da9ef0e51.vmdk or device None with type streamOptimized {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1106.305096] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-199a9dcf-65b4-445b-b517-ea160de58845 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.333942] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1106.333942] env[65503]: value = "task-4450725" [ 1106.333942] env[65503]: _type = "Task" [ 1106.333942] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.346413] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450725, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.544979] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450722, 'name': Rename_Task, 'duration_secs': 0.197399} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.545518] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1106.545575] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5806a79b-37da-4be5-a350-94e316059d9d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.557773] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1106.557773] env[65503]: value = "task-4450726" [ 1106.557773] env[65503]: _type = "Task" [ 1106.557773] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.569518] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450726, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.603095] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450719, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.334693} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.603496] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f/dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1106.603754] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1106.604061] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a268820e-df7b-4d8d-a9df-8ad26a5643f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.612693] env[65503]: WARNING neutronclient.v2_0.client [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
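
The repeated "Waiting for the task: (returnval){ ... }" and "progress is N%" entries above are emitted by oslo.vmware's task helpers (wait_for_task and _poll_task in oslo_vmware/api.py), which poll a vCenter task's TaskInfo until it reaches a terminal state. The sketch below is only a minimal illustration of that polling pattern, not the oslo.vmware implementation; the poll_task callable, interval and timeout are assumed placeholders.

    import time

    def wait_for_task(poll_task, interval=0.5, timeout=300.0):
        # poll_task() is assumed to return (state, progress), e.g. by reading the
        # task's TaskInfo via the PropertyCollector, as the RetrievePropertiesEx
        # calls in the trace above do.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = poll_task()
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('vCenter task failed')
            # mirrors the "progress is N%" debug lines emitted by _poll_task
            print('progress is %d%%' % progress)
            time.sleep(interval)
        raise TimeoutError('task did not complete within %.0f seconds' % timeout)
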
[ 1106.613454] env[65503]: WARNING openstack [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1106.613801] env[65503]: WARNING openstack [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1106.623959] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1106.623959] env[65503]: value = "task-4450727" [ 1106.623959] env[65503]: _type = "Task" [ 1106.623959] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.634576] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450727, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.666450] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450721, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.694958] env[65503]: DEBUG oslo_vmware.api [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450724, 'name': PowerOffVM_Task, 'duration_secs': 0.221288} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.702084] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1106.702301] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1106.703230] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-476e1d6e-2aa3-4977-adb9-0a2ce93be508 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.745208] env[65503]: WARNING openstack [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1106.745739] env[65503]: WARNING openstack [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1106.791985] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1106.792286] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1106.792548] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Deleting the datastore file [datastore2] f71dca10-3b68-4f1e-868e-a8c8271f7c88 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1106.792883] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80f055ba-ecde-41ba-94f5-a4d582f5ca22 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.803357] env[65503]: DEBUG oslo_vmware.api [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 
tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1106.803357] env[65503]: value = "task-4450729" [ 1106.803357] env[65503]: _type = "Task" [ 1106.803357] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.817186] env[65503]: DEBUG oslo_vmware.api [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450729, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.822928] env[65503]: WARNING neutronclient.v2_0.client [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1106.823767] env[65503]: WARNING openstack [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1106.824208] env[65503]: WARNING openstack [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1106.844576] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450725, 'name': ReconfigVM_Task, 'duration_secs': 0.412743} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.844902] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Reconfigured VM instance instance-0000005a to attach disk [datastore1] b00a98a4-4865-4a02-a353-3d1da9ef0e51/b00a98a4-4865-4a02-a353-3d1da9ef0e51.vmdk or device None with type streamOptimized {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.846373] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'guest_format': None, 'boot_index': 0, 'device_type': 'disk', 'encrypted': False, 'encryption_options': None, 'encryption_secret_uuid': None, 'device_name': '/dev/sda', 'disk_bus': None, 'size': 0, 'encryption_format': None, 'image_id': 'd68ffece-ab91-4610-b535-fa1fb25ade93'}], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'boot_index': None, 'mount_device': '/dev/sdb', 'device_type': None, 'attachment_id': '29168f2a-d51e-4c9c-8abb-c97edd9e5ceb', 'delete_on_termination': False, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870468', 'volume_id': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'name': 'volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'b00a98a4-4865-4a02-a353-3d1da9ef0e51', 'attached_at': '', 'detached_at': '', 'volume_id': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'serial': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba'}, 'volume_type': None}], 'swap': None} {{(pid=65503) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1106.846577] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1106.846799] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870468', 'volume_id': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'name': 'volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'b00a98a4-4865-4a02-a353-3d1da9ef0e51', 'attached_at': '', 'detached_at': '', 'volume_id': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'serial': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1106.847640] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d3fd57-dc69-40dd-9915-5c43745a693f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.865543] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d072e4-a44b-450a-bdd4-f89b2d6f7373 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.895201] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba/volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1106.895536] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b618b304-f9d0-4c44-8c8a-7d3991983c5f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.910512] env[65503]: INFO nova.network.neutron [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Port 884998aa-5fa4-410b-87e8-843ac2653ab7 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
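
The _attach_volume_vmdk entry above logs the Cinder connection_info handed to the driver for instance b00a98a4-4865-4a02-a353-3d1da9ef0e51. The sketch below only illustrates reading the fields a vmdk attach relies on from that structure; the dict literal is trimmed from the logged value, and describe_vmdk_attach is a hypothetical helper, not Nova's code.

    # connection_info fields as logged by _attach_volume_vmdk above (trimmed).
    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-870468',   # vCenter reference of the volume backing
            'volume_id': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba',
            'name': 'volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba',
            'access_mode': 'rw',
            'encrypted': False,
        },
    }

    def describe_vmdk_attach(connection_info):
        # Hypothetical helper: pulls out the pieces the attach path needs.
        assert connection_info['driver_volume_type'] == 'vmdk'
        data = connection_info['data']
        return data['volume'], data['volume_id'], data.get('access_mode', 'rw')

    print(describe_vmdk_attach(connection_info))
    # ('vm-870468', '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'rw')
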
[ 1106.910973] env[65503]: DEBUG nova.network.neutron [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updating instance_info_cache with network_info: [{"id": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "address": "fa:16:3e:b5:17:21", "network": {"id": "d2c28c8e-55de-416b-97e1-c5ea06e7f107", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1214855292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be67f50c5bc447309d4c04f3f2805455", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaba660fb-0b", "ovs_interfaceid": "aba660fb-0b53-43b7-a795-9ebaa9dd3097", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1106.918566] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1106.918566] env[65503]: value = "task-4450730" [ 1106.918566] env[65503]: _type = "Task" [ 1106.918566] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.929740] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450730, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.068800] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450726, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.134270] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450727, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.165112] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450721, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.203492} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.165475] env[65503]: INFO nova.virt.vmwareapi.ds_util [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] b7f55645-f152-4fc9-a962-393f9a9b9c55/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. [ 1107.166367] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8dbe27-614f-4b81-bf1c-20fbe75eabf5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.196277] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] b7f55645-f152-4fc9-a962-393f9a9b9c55/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1107.196966] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-faf5e13f-161c-4458-90be-f3858d6b4129 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.216857] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1107.216857] env[65503]: value = "task-4450731" [ 1107.216857] env[65503]: _type = "Task" [ 1107.216857] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.228284] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450731, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.316607] env[65503]: DEBUG oslo_vmware.api [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450729, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186304} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.316824] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1107.317071] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1107.317317] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1107.317503] env[65503]: INFO nova.compute.manager [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1107.317752] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1107.317945] env[65503]: DEBUG nova.compute.manager [-] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1107.318069] env[65503]: DEBUG nova.network.neutron [-] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1107.318358] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1107.318975] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1107.319278] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1107.360017] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1107.413798] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Releasing lock "refresh_cache-f71dca10-3b68-4f1e-868e-a8c8271f7c88" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.430526] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.568800] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450726, 'name': PowerOnVM_Task, 'duration_secs': 0.732223} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.569074] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1107.569316] env[65503]: DEBUG nova.compute.manager [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1107.570202] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b737edd3-6ecd-4901-8b44-419a2bcbd673 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.640065] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.700998} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.640065] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1107.640690] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653d66b9-b190-4986-b6b7-402d766d623e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.663879] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f/dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1107.667135] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f0f837a-328e-482e-be04-e15911c2279b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.688586] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1107.688586] env[65503]: value = "task-4450732" [ 1107.688586] env[65503]: _type = "Task" [ 1107.688586] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.700283] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450732, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.731244] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450731, 'name': ReconfigVM_Task, 'duration_secs': 0.331038} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.731706] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Reconfigured VM instance instance-00000062 to attach disk [datastore1] b7f55645-f152-4fc9-a962-393f9a9b9c55/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1107.733204] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1a39f7-9a66-46b7-ba33-00d8702e6fdb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.780627] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffb558ff-feca-4e25-967e-54b6e27ae635 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.807335] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1107.807335] env[65503]: value = "task-4450733" [ 1107.807335] env[65503]: _type = "Task" [ 1107.807335] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.824602] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450733, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.919507] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7e37531b-c2f4-44ee-b06d-ac4d44a4328a tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "interface-f71dca10-3b68-4f1e-868e-a8c8271f7c88-884998aa-5fa4-410b-87e8-843ac2653ab7" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.890s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.934190] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450730, 'name': ReconfigVM_Task, 'duration_secs': 0.929129} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.934677] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Reconfigured VM instance instance-0000005a to attach disk [datastore2] volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba/volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1107.941906] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3654a675-f031-4f35-b7b1-a2e6114acebf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.960754] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1107.960754] env[65503]: value = "task-4450734" [ 1107.960754] env[65503]: _type = "Task" [ 1107.960754] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.974208] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450734, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.085749] env[65503]: INFO nova.compute.manager [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] bringing vm to original state: 'stopped' [ 1108.147963] env[65503]: DEBUG nova.compute.manager [req-8efa1b16-ae34-45c1-87d2-98d104b42c57 req-db8c3e4e-22cc-4fdd-832d-7e3d1b10eab0 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Received event network-vif-deleted-aba660fb-0b53-43b7-a795-9ebaa9dd3097 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1108.148240] env[65503]: INFO nova.compute.manager [req-8efa1b16-ae34-45c1-87d2-98d104b42c57 req-db8c3e4e-22cc-4fdd-832d-7e3d1b10eab0 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Neutron deleted interface aba660fb-0b53-43b7-a795-9ebaa9dd3097; detaching it from the instance and deleting it from the info cache [ 1108.148423] env[65503]: DEBUG nova.network.neutron [req-8efa1b16-ae34-45c1-87d2-98d104b42c57 req-db8c3e4e-22cc-4fdd-832d-7e3d1b10eab0 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1108.199912] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450732, 'name': ReconfigVM_Task, 'duration_secs': 0.289104} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.200289] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Reconfigured VM instance instance-00000068 to attach disk [datastore2] dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f/dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1108.201331] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea4a9ab4-62cc-448f-b1a0-ad92aafd991c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.211666] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1108.211666] env[65503]: value = "task-4450737" [ 1108.211666] env[65503]: _type = "Task" [ 1108.211666] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.224445] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450737, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.322981] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450733, 'name': ReconfigVM_Task, 'duration_secs': 0.37228} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.323674] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1108.324106] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4181e84-9547-4e91-ac09-d29086d267d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.333068] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1108.333068] env[65503]: value = "task-4450738" [ 1108.333068] env[65503]: _type = "Task" [ 1108.333068] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.350831] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450738, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.472504] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450734, 'name': ReconfigVM_Task, 'duration_secs': 0.296171} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.472783] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870468', 'volume_id': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'name': 'volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'b00a98a4-4865-4a02-a353-3d1da9ef0e51', 'attached_at': '', 'detached_at': '', 'volume_id': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'serial': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1108.473463] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62d81d39-1e83-476b-b746-6768ffc0aea5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.481572] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1108.481572] env[65503]: value = "task-4450739" [ 1108.481572] env[65503]: _type = "Task" [ 1108.481572] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.496027] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450739, 'name': Rename_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.625269] env[65503]: DEBUG nova.network.neutron [None req-c8edd8f1-20d4-4241-84e8-2f9d2f4a191a None None] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1108.651150] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-533ecfe4-c632-497e-9a3a-fe0350d46939 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.661857] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702c79b3-9875-4899-b562-3282d13a1549 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.696023] env[65503]: DEBUG nova.compute.manager [req-8efa1b16-ae34-45c1-87d2-98d104b42c57 req-db8c3e4e-22cc-4fdd-832d-7e3d1b10eab0 service nova] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Detach interface failed, port_id=aba660fb-0b53-43b7-a795-9ebaa9dd3097, reason: Instance f71dca10-3b68-4f1e-868e-a8c8271f7c88 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1108.722802] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450737, 'name': Rename_Task, 'duration_secs': 0.157695} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.723165] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1108.723427] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1f11200-f78d-4d1c-b04d-8e7a5e56b568 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.732776] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1108.732776] env[65503]: value = "task-4450740" [ 1108.732776] env[65503]: _type = "Task" [ 1108.732776] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.744534] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450740, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.844912] env[65503]: DEBUG oslo_vmware.api [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450738, 'name': PowerOnVM_Task, 'duration_secs': 0.468561} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.845447] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1108.848158] env[65503]: DEBUG nova.compute.manager [None req-25120a05-6d67-4375-afb1-8a1b9eb10831 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1108.849075] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714f1ccb-eaca-4acf-8b9e-a088e67420de {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.993228] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450739, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.093671] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "eb18fc5c-168b-4442-af66-15e255ecc535" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.093949] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "eb18fc5c-168b-4442-af66-15e255ecc535" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.094266] env[65503]: DEBUG nova.compute.manager [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1109.095272] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2e1206-4c7a-4c33-98c8-b3b0a905d542 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.102610] env[65503]: DEBUG nova.compute.manager [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 1109.129164] env[65503]: INFO nova.compute.manager [None req-c8edd8f1-20d4-4241-84e8-2f9d2f4a191a None 
None] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Took 1.81 seconds to deallocate network for instance. [ 1109.247566] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450740, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.494691] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450739, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.607572] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1109.607790] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-649ccaea-c7d8-4190-bd2a-880b7d891ab6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.615920] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1109.615920] env[65503]: value = "task-4450741" [ 1109.615920] env[65503]: _type = "Task" [ 1109.615920] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.626120] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450741, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.636246] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.636521] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.636740] env[65503]: DEBUG nova.objects.instance [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'resources' on Instance uuid f71dca10-3b68-4f1e-868e-a8c8271f7c88 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.746095] env[65503]: DEBUG oslo_vmware.api [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450740, 'name': PowerOnVM_Task, 'duration_secs': 0.550769} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.746470] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1109.746703] env[65503]: INFO nova.compute.manager [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Took 9.18 seconds to spawn the instance on the hypervisor. [ 1109.746901] env[65503]: DEBUG nova.compute.manager [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1109.747754] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a9c5be-ea0c-4bde-b2dc-bfcae1afc208 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.993341] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450739, 'name': Rename_Task, 'duration_secs': 1.075852} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.993889] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1109.993889] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2884437d-8f71-4608-a60c-440c37d78b2d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.000531] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1110.000531] env[65503]: value = "task-4450742" [ 1110.000531] env[65503]: _type = "Task" [ 1110.000531] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.008988] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450742, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.127761] env[65503]: DEBUG oslo_vmware.api [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450741, 'name': PowerOffVM_Task, 'duration_secs': 0.207739} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.128083] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1110.128290] env[65503]: DEBUG nova.compute.manager [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1110.129170] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10586660-13ed-42a1-a557-bf21675f14fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.141888] env[65503]: INFO nova.compute.manager [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Unrescuing [ 1110.142683] env[65503]: DEBUG oslo_concurrency.lockutils [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.142683] env[65503]: DEBUG oslo_concurrency.lockutils [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquired lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.142683] env[65503]: DEBUG nova.network.neutron [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1110.273213] env[65503]: INFO nova.compute.manager [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Took 14.16 seconds to build instance. 
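Most of the vCenter operations recorded in this section follow the same invoke-then-poll pattern: a *_Task method is invoked (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task), then oslo.vmware's wait_for_task/_poll_task logs progress until the task completes successfully. A minimal sketch of that pattern, assuming a session object that behaves like the oslo_vmware.api.VMwareAPISession referenced in the api.py paths above; vm_ref is a hypothetical VirtualMachine managed-object reference, not taken from this log:

    # Illustrative sketch only, not Nova's actual code path.
    def power_on_and_wait(session, vm_ref):
        # Kick off the asynchronous vSphere task, mirroring the
        # "Invoking VirtualMachine.PowerOnVM_Task" records above.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # Block while oslo.vmware polls the task (the "_poll_task ...
        # progress is N%" lines), returning its result once it completes.
        return session.wait_for_task(task)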
[ 1110.311643] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7be3c23-047f-4a90-b849-a03c29bff3b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.320794] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28ef454-9bcf-44c6-92f5-edb7c2713261 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.361581] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab2025e-fa92-4984-82c3-20ee8bffaa8a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.370620] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8271b745-1b3c-4c55-96d8-9f6b73e1136f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.387592] env[65503]: DEBUG nova.compute.provider_tree [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.511220] env[65503]: DEBUG oslo_vmware.api [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450742, 'name': PowerOnVM_Task, 'duration_secs': 0.49185} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.511501] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1110.630195] env[65503]: DEBUG nova.compute.manager [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1110.631983] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0f7f87-a2f5-4c5c-b366-0e51e2c6ef04 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.644045] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "eb18fc5c-168b-4442-af66-15e255ecc535" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.550s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.648212] env[65503]: WARNING neutronclient.v2_0.client [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
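The "Acquiring lock ... / Lock ... acquired / Lock ... released" records in this section (for example the stop_instance lock on eb18fc5c-168b-4442-af66-15e255ecc535 just above, and the "compute_resources" lock further down) come from oslo.concurrency's lockutils wrappers, which also report the waited/held durations. A minimal sketch of the decorator form, assuming only the documented lockutils.synchronized behaviour; the function body is hypothetical and not taken from Nova:

    from oslo_concurrency import lockutils

    # Sketch of the pattern behind the "Acquiring lock ... / released" lines.
    @lockutils.synchronized("compute_resources")
    def update_usage_example():
        # Only one thread in this process holds the named lock at a time;
        # lockutils emits the acquire/release DEBUG records seen in the log.
        pass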
[ 1110.648948] env[65503]: WARNING openstack [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1110.649314] env[65503]: WARNING openstack [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1110.776128] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8ecbb11d-32df-42f5-a695-6a950885a1ef tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.680s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.780117] env[65503]: WARNING openstack [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1110.780562] env[65503]: WARNING openstack [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1110.840882] env[65503]: WARNING neutronclient.v2_0.client [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1110.841656] env[65503]: WARNING openstack [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1110.842032] env[65503]: WARNING openstack [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1110.890434] env[65503]: DEBUG nova.scheduler.client.report [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.923723] env[65503]: DEBUG nova.network.neutron [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updating instance_info_cache with network_info: [{"id": "7467737c-bad8-423a-85d9-f5870d27aebc", "address": "fa:16:3e:97:92:c0", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7467737c-ba", "ovs_interfaceid": "7467737c-bad8-423a-85d9-f5870d27aebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1111.153117] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c16cdbe6-acd8-4824-80f5-70a273d8b74f tempest-AttachVolumeShelveTestJSON-136164908 
tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 33.102s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.154121] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.271798] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.272093] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.272366] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.272498] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.273065] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.273065] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.273065] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.273232] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1111.273283] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.395810] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.759s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.398347] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.244s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.398530] env[65503]: DEBUG nova.objects.instance [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1111.421559] env[65503]: INFO nova.scheduler.client.report [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Deleted allocations for instance f71dca10-3b68-4f1e-868e-a8c8271f7c88 [ 1111.428476] env[65503]: DEBUG oslo_concurrency.lockutils [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Releasing lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.429164] env[65503]: DEBUG nova.objects.instance [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lazy-loading 'flavor' on Instance uuid b7f55645-f152-4fc9-a962-393f9a9b9c55 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1111.431589] env[65503]: DEBUG nova.compute.manager [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1111.432911] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52f7e0a-e155-4cf5-b5ba-65b73fa58b6a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.776587] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" 
{{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.929147] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ec1366d-d096-4fb0-b9aa-a683458e7510 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "f71dca10-3b68-4f1e-868e-a8c8271f7c88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.293s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.936040] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742d6f48-7e3f-45d0-8caa-af822d2f3245 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.962108] env[65503]: INFO nova.compute.manager [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] instance snapshotting [ 1111.963759] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1111.964555] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-608a913e-ffd0-4fe2-aba8-e15e0a2a8646 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.966718] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02910497-219f-451f-b367-ac4b8c487e42 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.989526] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3579fa1d-a095-4ad6-ba0a-723fc63f483a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.992390] env[65503]: DEBUG oslo_vmware.api [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1111.992390] env[65503]: value = "task-4450745" [ 1111.992390] env[65503]: _type = "Task" [ 1111.992390] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.003665] env[65503]: DEBUG oslo_vmware.api [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450745, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.117235] env[65503]: DEBUG oslo_concurrency.lockutils [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "eb18fc5c-168b-4442-af66-15e255ecc535" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.117545] env[65503]: DEBUG oslo_concurrency.lockutils [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "eb18fc5c-168b-4442-af66-15e255ecc535" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.117797] env[65503]: DEBUG oslo_concurrency.lockutils [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "eb18fc5c-168b-4442-af66-15e255ecc535-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.118012] env[65503]: DEBUG oslo_concurrency.lockutils [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "eb18fc5c-168b-4442-af66-15e255ecc535-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.118240] env[65503]: DEBUG oslo_concurrency.lockutils [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "eb18fc5c-168b-4442-af66-15e255ecc535-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.120916] env[65503]: INFO nova.compute.manager [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Terminating instance [ 1112.307607] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.307916] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.308137] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "1d1a96cc-63b3-472c-b94a-1ea00763f770-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.308321] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "1d1a96cc-63b3-472c-b94a-1ea00763f770-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.308482] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "1d1a96cc-63b3-472c-b94a-1ea00763f770-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.311058] env[65503]: INFO nova.compute.manager [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Terminating instance [ 1112.409230] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2b0b9b34-e688-45e0-ac32-2fdf3fa16a1f tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.410392] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.634s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.410578] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.410728] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1112.411785] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68d7a14-bf67-4439-8d0d-d29e868f5c72 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.421216] env[65503]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f30c873-eb1c-46ae-b935-68feec63394d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.437486] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70324d6-061f-46cb-b650-00b621d3ad3e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.445272] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c5d0cc-6573-4cad-ae80-dc216d0e751f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.477388] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178973MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1112.477590] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.477739] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.502307] env[65503]: DEBUG oslo_vmware.api [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450745, 'name': PowerOffVM_Task, 'duration_secs': 0.284815} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.502558] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1112.507694] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Reconfiguring VM instance instance-00000062 to detach disk 2002 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1112.508691] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1112.508927] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-197ccef2-1f32-4627-99be-0223fe8e5361 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.521840] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-26edabfc-86c5-4854-9ab7-92448f20a811 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.530651] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1112.530651] env[65503]: value = "task-4450746" [ 1112.530651] env[65503]: _type = "Task" [ 1112.530651] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.531927] env[65503]: DEBUG oslo_vmware.api [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1112.531927] env[65503]: value = "task-4450747" [ 1112.531927] env[65503]: _type = "Task" [ 1112.531927] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.545629] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450746, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.549444] env[65503]: DEBUG oslo_vmware.api [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450747, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.625962] env[65503]: DEBUG nova.compute.manager [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1112.626602] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1112.627413] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08002c6b-5eef-4488-be69-c9d19aa41c67 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.636507] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1112.636862] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-834bf7af-2c49-45a4-9f67-185d1004b5d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.703708] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1112.704050] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1112.704271] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleting the datastore file [datastore2] eb18fc5c-168b-4442-af66-15e255ecc535 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1112.704605] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25bc45de-f374-4b9d-9eef-c0b27f266bf0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.712496] env[65503]: DEBUG oslo_vmware.api [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1112.712496] env[65503]: value = "task-4450749" [ 1112.712496] env[65503]: _type = "Task" [ 1112.712496] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.723222] env[65503]: DEBUG oslo_vmware.api [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450749, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.817879] env[65503]: DEBUG nova.compute.manager [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1112.818231] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1112.819122] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4d0b85-5f84-4818-b7f7-7a65ee968a55 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.827851] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1112.828179] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9458409-ddb8-4678-a93b-91396a606caf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.835371] env[65503]: DEBUG oslo_vmware.api [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1112.835371] env[65503]: value = "task-4450750" [ 1112.835371] env[65503]: _type = "Task" [ 1112.835371] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.845809] env[65503]: DEBUG oslo_vmware.api [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450750, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.047803] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450746, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.050758] env[65503]: DEBUG oslo_vmware.api [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450747, 'name': ReconfigVM_Task, 'duration_secs': 0.280656} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.051090] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Reconfigured VM instance instance-00000062 to detach disk 2002 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1113.051433] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1113.051611] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8d439e8-9969-4cdf-8c6b-70887fc5092b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.059758] env[65503]: DEBUG oslo_vmware.api [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1113.059758] env[65503]: value = "task-4450751" [ 1113.059758] env[65503]: _type = "Task" [ 1113.059758] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.069718] env[65503]: DEBUG oslo_vmware.api [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450751, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.223224] env[65503]: DEBUG oslo_vmware.api [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450749, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149325} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.223532] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.223736] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.223915] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.224122] env[65503]: INFO nova.compute.manager [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1113.224390] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1113.224598] env[65503]: DEBUG nova.compute.manager [-] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1113.224714] env[65503]: DEBUG nova.network.neutron [-] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1113.225014] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1113.225597] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1113.225881] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1113.265306] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1113.349385] env[65503]: DEBUG oslo_vmware.api [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450750, 'name': PowerOffVM_Task, 'duration_secs': 0.257286} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.350037] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1113.350037] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1113.350445] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb28a1c9-ce02-47e9-b74f-54c230c26d3f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.425918] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1113.426259] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1113.426490] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Deleting the datastore file [datastore2] 1d1a96cc-63b3-472c-b94a-1ea00763f770 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1113.426957] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33a2e5a0-eac0-4a91-bdb8-9b4f65777028 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.435239] env[65503]: DEBUG oslo_vmware.api [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for the task: (returnval){ [ 1113.435239] env[65503]: value = "task-4450753" [ 1113.435239] env[65503]: _type = "Task" [ 1113.435239] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.448477] env[65503]: DEBUG oslo_vmware.api [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450753, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.517780] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance e37758cc-7287-4271-ad47-d711201d0add actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1113.518150] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1113.518236] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance d3ca90c9-3dfa-47a5-b48b-67a45ea26021 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1113.518381] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 1d1a96cc-63b3-472c-b94a-1ea00763f770 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1113.518457] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance b7f55645-f152-4fc9-a962-393f9a9b9c55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1113.518581] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1113.518750] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance b00a98a4-4865-4a02-a353-3d1da9ef0e51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1113.518887] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance eb18fc5c-168b-4442-af66-15e255ecc535 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1113.518986] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance bc700871-233c-4aa0-867e-4f166b6f54d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1113.519132] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1113.519291] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1113.519444] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '10', 'num_vm_active': '7', 'num_task_None': '6', 'num_os_type_None': '10', 'num_proj_d3ade9ce0dc44449bb7a3bf0c624e366': '3', 'io_workload': '0', 'num_vm_rescued': '2', 'num_proj_592efb180976432cbcecb9ad421e1bd1': '1', 'num_proj_521d40776571452e85178972f97c8622': '2', 'num_task_deleting': '2', 'num_proj_be67f50c5bc447309d4c04f3f2805455': '1', 'num_task_unrescuing': '1', 'num_proj_3658921b747e4d78a2046b838cb36d26': '1', 'num_vm_stopped': '1', 'num_proj_34e8cd66745a40d2acebbce98050ee5d': '1', 'num_task_image_pending_upload': '1', 'num_proj_8c5b0c3771b5434992cd58e1af539bde': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1113.548425] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450746, 'name': CreateSnapshot_Task, 'duration_secs': 0.603037} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.550253] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1113.551613] env[65503]: DEBUG nova.compute.manager [req-09bdc543-4d70-466b-8b0b-5489ab9bd267 req-d0d62c14-bece-4a94-b7dd-9e6b6b6fbe9d service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Received event network-vif-deleted-0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1113.551613] env[65503]: INFO nova.compute.manager [req-09bdc543-4d70-466b-8b0b-5489ab9bd267 req-d0d62c14-bece-4a94-b7dd-9e6b6b6fbe9d service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Neutron deleted interface 0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80; detaching it from the instance and deleting it from the info cache [ 1113.551613] env[65503]: DEBUG nova.network.neutron [req-09bdc543-4d70-466b-8b0b-5489ab9bd267 req-d0d62c14-bece-4a94-b7dd-9e6b6b6fbe9d service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1113.553180] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07092860-f208-4fd8-ad39-e740d9599344 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.578976] env[65503]: DEBUG oslo_vmware.api [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450751, 'name': PowerOnVM_Task, 'duration_secs': 0.393363} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.579282] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1113.579503] env[65503]: DEBUG nova.compute.manager [None req-02f5b6ff-bb9f-4c0c-a709-8bebcb283f93 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1113.580321] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb1e79d-d7cd-4280-9120-b1bf53816a09 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.676942] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cc9b0b-dd92-44f0-9781-79d115516296 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.685805] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdedcadf-6226-4df9-8212-7c7889ac9685 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.718609] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65eefc70-f839-48e1-88cd-44e46c311f8e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.727388] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c70bc2a-ea5a-4397-8af8-5579556e7126 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.744271] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.948412] env[65503]: DEBUG oslo_vmware.api [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Task: {'id': task-4450753, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20412} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.949665] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.949942] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.951210] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.951419] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.951603] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.951801] env[65503]: INFO nova.compute.manager [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1113.952039] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1113.952634] env[65503]: DEBUG nova.compute.manager [-] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1113.952736] env[65503]: DEBUG nova.network.neutron [-] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1113.952966] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1113.953496] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1113.953769] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1114.004318] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1114.011912] env[65503]: DEBUG nova.network.neutron [-] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1114.057671] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f5cf7a4-8d3d-4da1-a9ed-5642722a0c7b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.068498] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051e025d-9209-4144-b333-21e6e75869d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.087270] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1114.087270] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1ffca901-8435-4558-9efb-c42235b7e30d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.098799] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1114.098799] env[65503]: value = "task-4450754" [ 1114.098799] env[65503]: _type = "Task" [ 1114.098799] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.114013] env[65503]: DEBUG nova.compute.manager [req-09bdc543-4d70-466b-8b0b-5489ab9bd267 req-d0d62c14-bece-4a94-b7dd-9e6b6b6fbe9d service nova] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Detach interface failed, port_id=0dfdce64-2cd1-4aa5-bfb9-867ee6d82a80, reason: Instance eb18fc5c-168b-4442-af66-15e255ecc535 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1114.121015] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450754, 'name': CloneVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.247863] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.401159] env[65503]: DEBUG nova.compute.manager [req-0541a228-a0d2-4105-91ff-73f3fa4cca21 req-b34758ab-6e23-4d41-9490-d877233b14ad service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Received event network-vif-deleted-b5a654cb-c44f-45fc-bf0c-429ff06916f0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1114.401159] env[65503]: INFO nova.compute.manager [req-0541a228-a0d2-4105-91ff-73f3fa4cca21 req-b34758ab-6e23-4d41-9490-d877233b14ad service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Neutron deleted interface b5a654cb-c44f-45fc-bf0c-429ff06916f0; detaching it from the instance and deleting it from the info cache [ 1114.401509] env[65503]: DEBUG nova.network.neutron [req-0541a228-a0d2-4105-91ff-73f3fa4cca21 req-b34758ab-6e23-4d41-9490-d877233b14ad service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1114.454341] env[65503]: DEBUG nova.compute.utils [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1114.490736] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Acquiring lock "7b98ff40-1580-4175-adc5-66ca8977990a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.490991] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "7b98ff40-1580-4175-adc5-66ca8977990a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.515461] env[65503]: INFO nova.compute.manager [-] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Took 1.29 seconds to deallocate network for instance. [ 1114.610915] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450754, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.725796] env[65503]: DEBUG nova.network.neutron [-] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1114.754713] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1114.755498] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.277s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.903958] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04b53c52-40b3-4f64-a7ac-2a46bf332152 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.914477] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f932d41-63b5-4be7-95fd-f44d91772854 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.947725] env[65503]: DEBUG nova.compute.manager [req-0541a228-a0d2-4105-91ff-73f3fa4cca21 req-b34758ab-6e23-4d41-9490-d877233b14ad service nova] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Detach interface failed, port_id=b5a654cb-c44f-45fc-bf0c-429ff06916f0, reason: Instance 1d1a96cc-63b3-472c-b94a-1ea00763f770 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1114.957890] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.995309] env[65503]: DEBUG nova.compute.manager [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1115.022738] env[65503]: DEBUG oslo_concurrency.lockutils [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.025665] env[65503]: DEBUG oslo_concurrency.lockutils [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.025665] env[65503]: DEBUG nova.objects.instance [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lazy-loading 'resources' on Instance uuid eb18fc5c-168b-4442-af66-15e255ecc535 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.112200] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450754, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.229717] env[65503]: INFO nova.compute.manager [-] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Took 1.28 seconds to deallocate network for instance. [ 1115.525720] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.590616] env[65503]: DEBUG nova.compute.manager [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Received event network-changed-7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1115.591865] env[65503]: DEBUG nova.compute.manager [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Refreshing instance network info cache due to event network-changed-7467737c-bad8-423a-85d9-f5870d27aebc. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1115.591865] env[65503]: DEBUG oslo_concurrency.lockutils [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Acquiring lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.591865] env[65503]: DEBUG oslo_concurrency.lockutils [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Acquired lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.591865] env[65503]: DEBUG nova.network.neutron [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Refreshing network info cache for port 7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1115.613457] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450754, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.730645] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e09f89b-22cb-4237-bbfd-a595316c060e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.739231] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.740423] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ace125-387c-4f76-a7ea-e96b9a7e6643 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.773743] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51f3a10-c4d0-4fb9-8a9b-0fd0281e76c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.782471] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c847ac-51ae-4bda-a4d6-5825dcde59d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.798571] env[65503]: DEBUG nova.compute.provider_tree [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.038836] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] 
Acquiring lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.039312] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.040621] env[65503]: INFO nova.compute.manager [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Attaching volume 67d27af0-2341-4a81-8a74-90b6123a2a7d to /dev/sdb [ 1116.082452] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df808bd0-5485-4f84-832a-0c0b8983db07 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.091575] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77dbd16b-0a11-40ee-808a-5c45fc6264fe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.102568] env[65503]: WARNING neutronclient.v2_0.client [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1116.102568] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1116.102568] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1116.113614] env[65503]: DEBUG nova.virt.block_device [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating existing volume attachment record: f13a141d-a3f7-45bd-a704-49f4e382533e {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1116.127143] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450754, 'name': CloneVM_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.301990] env[65503]: DEBUG nova.scheduler.client.report [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1116.315195] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1116.315657] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1116.454213] env[65503]: WARNING neutronclient.v2_0.client [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1116.454482] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1116.454851] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1116.567218] env[65503]: DEBUG nova.network.neutron [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updated VIF entry in instance network info cache for port 7467737c-bad8-423a-85d9-f5870d27aebc. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1116.567620] env[65503]: DEBUG nova.network.neutron [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updating instance_info_cache with network_info: [{"id": "7467737c-bad8-423a-85d9-f5870d27aebc", "address": "fa:16:3e:97:92:c0", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7467737c-ba", "ovs_interfaceid": "7467737c-bad8-423a-85d9-f5870d27aebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1116.623080] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450754, 'name': CloneVM_Task, 'duration_secs': 2.05193} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.623388] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Created linked-clone VM from snapshot [ 1116.624193] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465db57d-af7e-4798-95ed-65005b18a61a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.632499] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Uploading image fb2479da-9086-4847-99c7-c76d01b70909 {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1116.664288] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1116.664596] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-41b004c1-a5d7-411b-9fb5-c279bc1facfa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.673166] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1116.673166] env[65503]: value = "task-4450758" [ 1116.673166] env[65503]: _type = "Task" [ 1116.673166] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.682772] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450758, 'name': Destroy_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.808575] env[65503]: DEBUG oslo_concurrency.lockutils [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.785s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.811329] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.286s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.812940] env[65503]: INFO nova.compute.claims [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1116.838746] env[65503]: INFO nova.scheduler.client.report [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted allocations for instance eb18fc5c-168b-4442-af66-15e255ecc535 [ 1117.071350] env[65503]: DEBUG oslo_concurrency.lockutils [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Releasing lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.071704] env[65503]: DEBUG nova.compute.manager [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Received event network-changed-7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1117.071753] env[65503]: DEBUG nova.compute.manager [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Refreshing instance network info cache due to event network-changed-7467737c-bad8-423a-85d9-f5870d27aebc. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1117.071934] env[65503]: DEBUG oslo_concurrency.lockutils [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Acquiring lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.072085] env[65503]: DEBUG oslo_concurrency.lockutils [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Acquired lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.072275] env[65503]: DEBUG nova.network.neutron [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Refreshing network info cache for port 7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1117.186203] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450758, 'name': Destroy_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.349975] env[65503]: DEBUG oslo_concurrency.lockutils [None req-149f2a6f-b7ae-4f2f-92f7-9e358161edc9 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "eb18fc5c-168b-4442-af66-15e255ecc535" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.232s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.575289] env[65503]: WARNING neutronclient.v2_0.client [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1117.576024] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1117.576470] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1117.685282] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450758, 'name': Destroy_Task, 'duration_secs': 0.516655} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.685347] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Destroyed the VM [ 1117.685574] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1117.685834] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ff2a9917-5b52-4b68-a9fa-de51a76bb15a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.694143] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1117.694143] env[65503]: value = "task-4450759" [ 1117.694143] env[65503]: _type = "Task" [ 1117.694143] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.699625] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1117.700033] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1117.713674] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450759, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.766347] env[65503]: WARNING neutronclient.v2_0.client [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1117.767267] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1117.767690] env[65503]: WARNING openstack [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1117.856214] env[65503]: DEBUG nova.network.neutron [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updated VIF entry in instance network info cache for port 7467737c-bad8-423a-85d9-f5870d27aebc. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1117.856691] env[65503]: DEBUG nova.network.neutron [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updating instance_info_cache with network_info: [{"id": "7467737c-bad8-423a-85d9-f5870d27aebc", "address": "fa:16:3e:97:92:c0", "network": {"id": "505b957b-910e-4cef-8cc9-e43fb7cd2d1c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1427396707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3ade9ce0dc44449bb7a3bf0c624e366", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7467737c-ba", "ovs_interfaceid": "7467737c-bad8-423a-85d9-f5870d27aebc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1117.988918] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbe876f-8641-43fa-8d7c-d0fb0bce3801 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.998839] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b533a285-cbe4-46cf-be60-105844bfc100 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.034546] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1902fa54-67bd-463a-a67c-8f329d0e2bf9 {{(pid=65503) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.043578] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79bbaea4-98ab-4e70-9b56-b02073dccc63 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.058373] env[65503]: DEBUG nova.compute.provider_tree [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1118.207588] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450759, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.359661] env[65503]: DEBUG oslo_concurrency.lockutils [req-4fa35c2b-cd3f-4ae9-881d-68592fa166e7 req-830124d2-77aa-443d-a830-e333f434717d service nova] Releasing lock "refresh_cache-b7f55645-f152-4fc9-a962-393f9a9b9c55" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.562238] env[65503]: DEBUG nova.scheduler.client.report [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1118.705902] env[65503]: DEBUG oslo_vmware.api [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450759, 'name': RemoveSnapshot_Task, 'duration_secs': 0.680499} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.706295] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1119.068279] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.068692] env[65503]: DEBUG nova.compute.manager [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1119.071573] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.332s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.071819] env[65503]: DEBUG nova.objects.instance [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lazy-loading 'resources' on Instance uuid 1d1a96cc-63b3-472c-b94a-1ea00763f770 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.211220] env[65503]: WARNING nova.compute.manager [None req-cf16c39f-68cb-4f31-998c-16f9a6d338f6 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Image not found during snapshot: nova.exception.ImageNotFound: Image fb2479da-9086-4847-99c7-c76d01b70909 could not be found. 
[ 1119.337055] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.337294] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.574736] env[65503]: DEBUG nova.compute.utils [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1119.577516] env[65503]: DEBUG nova.compute.manager [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1119.577810] env[65503]: DEBUG nova.network.neutron [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1119.578300] env[65503]: WARNING neutronclient.v2_0.client [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1119.578790] env[65503]: WARNING neutronclient.v2_0.client [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1119.579680] env[65503]: WARNING openstack [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1119.580786] env[65503]: WARNING openstack [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1119.644812] env[65503]: DEBUG nova.policy [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd826bb69d2b14e8d8dbe2121b1260eb4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b701e3d72ae1422a88e27cbb4ae8ea9b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1119.759832] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f655748-23aa-4d8d-b948-ced413cade11 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.770012] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf43d2bb-2b32-4415-be3d-5c5065ea0ad4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.802377] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68866964-1d4c-4887-8f40-8239f9d29d29 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.811090] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a050e8e3-54ec-4bba-b89b-3842f5142581 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.825744] env[65503]: DEBUG nova.compute.provider_tree [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.842490] env[65503]: DEBUG nova.compute.manager [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1119.970354] env[65503]: DEBUG nova.network.neutron [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Successfully created port: 816cbefe-fa43-42dd-8737-9679d9a453e8 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1120.078116] env[65503]: DEBUG nova.compute.manager [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1120.331907] env[65503]: DEBUG nova.scheduler.client.report [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.371352] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.446285] env[65503]: DEBUG oslo_concurrency.lockutils [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.446474] env[65503]: DEBUG oslo_concurrency.lockutils [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.446697] env[65503]: DEBUG oslo_concurrency.lockutils [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.446884] env[65503]: DEBUG oslo_concurrency.lockutils [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 
tempest-ImagesTestJSON-2086755152-project-member] Lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.447093] env[65503]: DEBUG oslo_concurrency.lockutils [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.449480] env[65503]: INFO nova.compute.manager [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Terminating instance [ 1120.583980] env[65503]: INFO nova.virt.block_device [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Booting with volume 856eed9a-2608-4d0d-b7b3-52e61d723973 at /dev/sda [ 1120.629775] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1fef5142-6682-4de4-8c0a-def4968b9833 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.641035] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e879891e-6128-411f-bd19-ab32521bc4df {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.673695] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-621fe616-3f98-4f05-917d-bbdf0bd71469 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.683631] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7066feb-9e19-49d7-ba12-1b3a5206b473 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.694887] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1120.695124] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870485', 'volume_id': '67d27af0-2341-4a81-8a74-90b6123a2a7d', 'name': 'volume-67d27af0-2341-4a81-8a74-90b6123a2a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a', 'attached_at': '', 'detached_at': '', 'volume_id': '67d27af0-2341-4a81-8a74-90b6123a2a7d', 'serial': '67d27af0-2341-4a81-8a74-90b6123a2a7d'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1120.695970] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3eeb8e6-994e-4e1a-9d8c-721b909d62b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.723754] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3b3d17-78a1-4315-8133-3587afea4180 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.727464] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59836e75-5f7b-49f8-add5-45b85b03eebd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.748684] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d5b70f-e710-4728-a57c-71272b2f52ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.758354] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] volume-67d27af0-2341-4a81-8a74-90b6123a2a7d/volume-67d27af0-2341-4a81-8a74-90b6123a2a7d.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1120.758693] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a8db35b-1077-4867-885d-2d5f402f30c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.779089] env[65503]: DEBUG oslo_vmware.api [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1120.779089] env[65503]: value = "task-4450761" [ 1120.779089] env[65503]: _type = "Task" [ 1120.779089] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.786022] env[65503]: DEBUG nova.virt.block_device [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Updating existing volume attachment record: ccdfae1b-a20c-4287-92aa-1d25f5330157 {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1120.794335] env[65503]: DEBUG oslo_vmware.api [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450761, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.837739] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.766s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.840581] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.469s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.842308] env[65503]: INFO nova.compute.claims [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1120.864759] env[65503]: INFO nova.scheduler.client.report [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Deleted allocations for instance 1d1a96cc-63b3-472c-b94a-1ea00763f770 [ 1120.953504] env[65503]: DEBUG nova.compute.manager [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1120.953741] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1120.954695] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc20a6e-2763-423c-906a-dbbd6d6b5bff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.964174] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1120.964553] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1197da11-cde9-4bfa-b746-7ba53ac253db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.973945] env[65503]: DEBUG oslo_vmware.api [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1120.973945] env[65503]: value = "task-4450762" [ 1120.973945] env[65503]: _type = "Task" [ 1120.973945] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.983958] env[65503]: DEBUG oslo_vmware.api [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450762, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.290127] env[65503]: DEBUG oslo_vmware.api [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450761, 'name': ReconfigVM_Task, 'duration_secs': 0.359323} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.291060] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfigured VM instance instance-00000064 to attach disk [datastore2] volume-67d27af0-2341-4a81-8a74-90b6123a2a7d/volume-67d27af0-2341-4a81-8a74-90b6123a2a7d.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.295590] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3f6c543-e9b4-4c6a-a58a-d772d7b2b34a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.313074] env[65503]: DEBUG oslo_vmware.api [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1121.313074] env[65503]: value = "task-4450763" [ 1121.313074] env[65503]: _type = "Task" [ 1121.313074] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.323594] env[65503]: DEBUG oslo_vmware.api [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450763, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.376395] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5699f092-85cc-4403-a3da-597773b932f2 tempest-AttachInterfacesTestJSON-1313165704 tempest-AttachInterfacesTestJSON-1313165704-project-member] Lock "1d1a96cc-63b3-472c-b94a-1ea00763f770" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.068s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.401979] env[65503]: DEBUG nova.compute.manager [req-65492577-18b6-4e3f-940c-95f12ee7c391 req-a1e5db84-5f41-4789-a6e4-9bb2a63cb3b3 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Received event network-vif-plugged-816cbefe-fa43-42dd-8737-9679d9a453e8 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1121.401979] env[65503]: DEBUG oslo_concurrency.lockutils [req-65492577-18b6-4e3f-940c-95f12ee7c391 req-a1e5db84-5f41-4789-a6e4-9bb2a63cb3b3 service nova] Acquiring lock "7b98ff40-1580-4175-adc5-66ca8977990a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.401979] env[65503]: DEBUG oslo_concurrency.lockutils [req-65492577-18b6-4e3f-940c-95f12ee7c391 req-a1e5db84-5f41-4789-a6e4-9bb2a63cb3b3 service nova] Lock "7b98ff40-1580-4175-adc5-66ca8977990a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.401979] env[65503]: DEBUG oslo_concurrency.lockutils [req-65492577-18b6-4e3f-940c-95f12ee7c391 req-a1e5db84-5f41-4789-a6e4-9bb2a63cb3b3 service nova] Lock 
"7b98ff40-1580-4175-adc5-66ca8977990a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.402426] env[65503]: DEBUG nova.compute.manager [req-65492577-18b6-4e3f-940c-95f12ee7c391 req-a1e5db84-5f41-4789-a6e4-9bb2a63cb3b3 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] No waiting events found dispatching network-vif-plugged-816cbefe-fa43-42dd-8737-9679d9a453e8 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1121.402634] env[65503]: WARNING nova.compute.manager [req-65492577-18b6-4e3f-940c-95f12ee7c391 req-a1e5db84-5f41-4789-a6e4-9bb2a63cb3b3 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Received unexpected event network-vif-plugged-816cbefe-fa43-42dd-8737-9679d9a453e8 for instance with vm_state building and task_state block_device_mapping. [ 1121.485858] env[65503]: DEBUG oslo_vmware.api [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450762, 'name': PowerOffVM_Task, 'duration_secs': 0.229221} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.486149] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1121.486339] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1121.486610] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15864266-dd9d-4c40-960d-f45dbedb1bec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.509548] env[65503]: DEBUG nova.network.neutron [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Successfully updated port: 816cbefe-fa43-42dd-8737-9679d9a453e8 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1121.565746] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1121.566044] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1121.566289] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 
tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleting the datastore file [datastore2] dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1121.566588] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9b982be-d26a-4ed3-9b54-67fc9dd77739 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.575032] env[65503]: DEBUG oslo_vmware.api [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for the task: (returnval){ [ 1121.575032] env[65503]: value = "task-4450765" [ 1121.575032] env[65503]: _type = "Task" [ 1121.575032] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.584396] env[65503]: DEBUG oslo_vmware.api [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450765, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.823933] env[65503]: DEBUG oslo_vmware.api [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450763, 'name': ReconfigVM_Task, 'duration_secs': 0.168318} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.824272] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870485', 'volume_id': '67d27af0-2341-4a81-8a74-90b6123a2a7d', 'name': 'volume-67d27af0-2341-4a81-8a74-90b6123a2a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a', 'attached_at': '', 'detached_at': '', 'volume_id': '67d27af0-2341-4a81-8a74-90b6123a2a7d', 'serial': '67d27af0-2341-4a81-8a74-90b6123a2a7d'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1122.011058] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd36ad57-1f03-4a23-a26f-742d603d8dd6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.014756] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Acquiring lock "refresh_cache-7b98ff40-1580-4175-adc5-66ca8977990a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.014953] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Acquired lock "refresh_cache-7b98ff40-1580-4175-adc5-66ca8977990a" {{(pid=65503) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.015102] env[65503]: DEBUG nova.network.neutron [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1122.024907] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b635a5f1-c349-4491-ae81-11e17f26411c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.059254] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4fd0137-e24b-440e-954d-8dd4543bcb6a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.066631] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e40dbf5-4587-4562-9922-513b0b7a2be1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.082081] env[65503]: DEBUG nova.compute.provider_tree [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.093616] env[65503]: DEBUG oslo_vmware.api [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Task: {'id': task-4450765, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165853} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.094641] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1122.094841] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1122.095025] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1122.095201] env[65503]: INFO nova.compute.manager [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Took 1.14 seconds to destroy the instance on the hypervisor. 
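The teardown of instance dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f above follows the usual oslo.vmware task lifecycle: each vCenter method (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) returns a task reference, wait_for_task registers it, and _poll_task re-reads the task state until it reports success or error, logging the progress percentages seen in the DEBUG lines. The following is only an illustrative sketch of that poll-until-done pattern, not the oslo.vmware implementation; TaskInfo and read_task_info are stand-ins for whatever the session layer actually exposes.

    import time

    # Hypothetical stand-in for the task info a vSphere session returns;
    # the real library reads this through the property collector.
    class TaskInfo:
        def __init__(self, state, progress=0, error=None, result=None):
            self.state = state          # 'queued' | 'running' | 'success' | 'error'
            self.progress = progress    # integer percentage, as in the log
            self.error = error
            self.result = result

    def wait_for_task(read_task_info, task_ref, interval=0.5, timeout=300):
        """Poll a task until it completes, mirroring the wait_for_task /
        _poll_task progress lines in the log above."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = read_task_info(task_ref)      # one poll cycle
            if info.state == 'success':
                return info.result               # e.g. a created object ref
            if info.state == 'error':
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            # still queued/running: report progress and poll again
            print(f"Task {task_ref} progress is {info.progress}%.")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_ref} did not complete in {timeout}s")

In the log the same cycle repeats for ReconfigVM_Task, CreateVM_Task and RelocateVM_Task; only the reported duration_secs differ.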
[ 1122.095486] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1122.095681] env[65503]: DEBUG nova.compute.manager [-] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1122.095775] env[65503]: DEBUG nova.network.neutron [-] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1122.096023] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1122.096552] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1122.096805] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1122.136718] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1122.519787] env[65503]: WARNING openstack [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1122.521064] env[65503]: WARNING openstack [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1122.559829] env[65503]: DEBUG nova.network.neutron [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1122.579319] env[65503]: WARNING openstack [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1122.579722] env[65503]: WARNING openstack [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1122.589037] env[65503]: DEBUG nova.scheduler.client.report [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.648531] env[65503]: WARNING neutronclient.v2_0.client [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
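The inventory payload reported to Placement above bounds what the scheduler may allocate on provider 988ff85a-1d12-41bb-a369-e298e8491ca1: usable capacity per resource class is (total - reserved) * allocation_ratio, while any single allocation is further constrained by min_unit, max_unit and step_size. A quick sanity check against the logged figures, as a small sketch (the dict copies the values from the log entry):

    # Inventory exactly as reported in the log for the provider above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'min_unit': 1, 'max_unit': 96,    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def capacity(inv):
        """Schedulable capacity per class: (total - reserved) * allocation_ratio."""
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 200}
    # 48 physical vCPUs * 4.0 overcommit = 192 schedulable VCPU units,
    # but a single instance is still capped at max_unit = 16 VCPU.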
[ 1122.649370] env[65503]: WARNING openstack [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1122.649734] env[65503]: WARNING openstack [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1122.784786] env[65503]: DEBUG nova.network.neutron [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Updating instance_info_cache with network_info: [{"id": "816cbefe-fa43-42dd-8737-9679d9a453e8", "address": "fa:16:3e:8b:26:f3", "network": {"id": "7b9e1f46-ccfc-47a4-a361-c45c8eab6f7a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1976781837-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b701e3d72ae1422a88e27cbb4ae8ea9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap816cbefe-fa", "ovs_interfaceid": "816cbefe-fa43-42dd-8737-9679d9a453e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1122.831592] env[65503]: DEBUG nova.network.neutron [-] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1122.878441] env[65503]: DEBUG nova.objects.instance [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'flavor' on Instance uuid 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1122.883061] env[65503]: DEBUG nova.compute.manager [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1122.884029] env[65503]: DEBUG nova.virt.hardware [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1122.884386] env[65503]: DEBUG nova.virt.hardware [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1122.884672] env[65503]: DEBUG nova.virt.hardware [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1122.884982] env[65503]: DEBUG nova.virt.hardware [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1122.885197] env[65503]: DEBUG nova.virt.hardware [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1122.885431] env[65503]: DEBUG nova.virt.hardware [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1122.885636] env[65503]: DEBUG nova.virt.hardware [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1122.885915] env[65503]: DEBUG nova.virt.hardware [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1122.885951] env[65503]: DEBUG nova.virt.hardware [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 
tempest-ServerActionsV293TestJSON-2086592069-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1122.886099] env[65503]: DEBUG nova.virt.hardware [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1122.886409] env[65503]: DEBUG nova.virt.hardware [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1122.887381] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5260d3-8daa-4a6e-add3-7e59e21bde57 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.898224] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d62d70-0027-49f6-bd9f-0a825aa44522 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.093700] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.253s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.094433] env[65503]: DEBUG nova.compute.manager [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1123.293942] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Releasing lock "refresh_cache-7b98ff40-1580-4175-adc5-66ca8977990a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.294424] env[65503]: DEBUG nova.compute.manager [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Instance network_info: |[{"id": "816cbefe-fa43-42dd-8737-9679d9a453e8", "address": "fa:16:3e:8b:26:f3", "network": {"id": "7b9e1f46-ccfc-47a4-a361-c45c8eab6f7a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1976781837-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b701e3d72ae1422a88e27cbb4ae8ea9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap816cbefe-fa", "ovs_interfaceid": "816cbefe-fa43-42dd-8737-9679d9a453e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1123.294901] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:26:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cca1f087-01e1-49ca-831b-5c51478a5d60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '816cbefe-fa43-42dd-8737-9679d9a453e8', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1123.302833] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Creating folder: Project (b701e3d72ae1422a88e27cbb4ae8ea9b). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1123.303925] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6ee7275-3824-4362-bb47-4d0fdb4a5233 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.319126] env[65503]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
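The suds WARNING above is benign: Folder.CreateFolder raised a DuplicateName fault because the project folder already exists, and (as the entries immediately below show) the driver treats that as success, reuses the folder and moves on to creating the Instances folder. A minimal sketch of that create-or-reuse pattern, assuming a hypothetical invoke callable and fault class in place of the real session plumbing:

    class DuplicateNameFault(Exception):
        """Stand-in for the DuplicateName fault surfaced by the SOAP layer."""

    def create_folder_idempotent(invoke, parent_ref, name):
        """Create a vCenter folder, treating 'already exists' as success.

        'invoke' is a hypothetical callable that issues the folder calls and
        raises DuplicateNameFault when a sibling with the same name exists.
        """
        try:
            return invoke('CreateFolder', parent_ref, name=name)
        except DuplicateNameFault:
            # Same outcome as the log: "Folder already exists" -> reuse it
            # rather than failing the build.
            print(f"Folder already exists: {name}. Parent ref: {parent_ref}.")
            return invoke('FindChild', parent_ref, name=name)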
[ 1123.319381] env[65503]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=65503) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1123.319776] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Folder already exists: Project (b701e3d72ae1422a88e27cbb4ae8ea9b). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1123.320415] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Creating folder: Instances. Parent ref: group-v870475. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1123.320634] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5527267-3429-4380-b76c-dedbc278717b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.334691] env[65503]: INFO nova.compute.manager [-] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Took 1.24 seconds to deallocate network for instance. [ 1123.335138] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Created folder: Instances in parent group-v870475. [ 1123.335291] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1123.337195] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1123.341302] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd3e1802-7a2a-4ca5-a773-f29cd53b3c8f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.363973] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1123.363973] env[65503]: value = "task-4450768" [ 1123.363973] env[65503]: _type = "Task" [ 1123.363973] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.373704] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450768, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.388481] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ecdb265-ba18-46eb-9a00-292b357f871e tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.349s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.461458] env[65503]: DEBUG nova.compute.manager [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Received event network-changed-816cbefe-fa43-42dd-8737-9679d9a453e8 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1123.461861] env[65503]: DEBUG nova.compute.manager [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Refreshing instance network info cache due to event network-changed-816cbefe-fa43-42dd-8737-9679d9a453e8. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1123.462076] env[65503]: DEBUG oslo_concurrency.lockutils [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] Acquiring lock "refresh_cache-7b98ff40-1580-4175-adc5-66ca8977990a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.462106] env[65503]: DEBUG oslo_concurrency.lockutils [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] Acquired lock "refresh_cache-7b98ff40-1580-4175-adc5-66ca8977990a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.462287] env[65503]: DEBUG nova.network.neutron [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Refreshing network info cache for port 816cbefe-fa43-42dd-8737-9679d9a453e8 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1123.599992] env[65503]: DEBUG nova.compute.utils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1123.602931] env[65503]: DEBUG nova.compute.manager [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1123.602931] env[65503]: DEBUG nova.network.neutron [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1123.602931] env[65503]: WARNING neutronclient.v2_0.client [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1123.602931] env[65503]: WARNING neutronclient.v2_0.client [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1123.603620] env[65503]: WARNING openstack [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1123.603997] env[65503]: WARNING openstack [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1123.656687] env[65503]: DEBUG nova.policy [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9955668c2464ddfb0eae34aa700ddd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '521d40776571452e85178972f97c8622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1123.859468] env[65503]: DEBUG oslo_concurrency.lockutils [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.859726] env[65503]: DEBUG oslo_concurrency.lockutils [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.859948] env[65503]: DEBUG nova.objects.instance [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lazy-loading 'resources' on Instance uuid dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1123.881213] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450768, 'name': CreateVM_Task, 'duration_secs': 0.322432} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.881401] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1123.882154] env[65503]: WARNING neutronclient.v2_0.client [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1123.882530] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'boot_index': 0, 'mount_device': '/dev/sda', 'device_type': None, 'attachment_id': 'ccdfae1b-a20c-4287-92aa-1d25f5330157', 'delete_on_termination': True, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870481', 'volume_id': '856eed9a-2608-4d0d-b7b3-52e61d723973', 'name': 'volume-856eed9a-2608-4d0d-b7b3-52e61d723973', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7b98ff40-1580-4175-adc5-66ca8977990a', 'attached_at': '', 'detached_at': '', 'volume_id': '856eed9a-2608-4d0d-b7b3-52e61d723973', 'serial': '856eed9a-2608-4d0d-b7b3-52e61d723973'}, 'volume_type': None}], 'swap': None} {{(pid=65503) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1123.882728] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Root volume attach. 
Driver type: vmdk {{(pid=65503) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1123.883687] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3206d85-22dd-44cd-b0fe-ac0b7d263cb7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.893183] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f58c658-1dd3-49b5-adf1-8e3d856ffc40 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.900756] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f683c03-c1fe-4130-bab6-ffe93728289a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.908230] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-67bb35c5-4a0f-4ad2-b5fe-0a3bf2101b73 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.917776] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for the task: (returnval){ [ 1123.917776] env[65503]: value = "task-4450769" [ 1123.917776] env[65503]: _type = "Task" [ 1123.917776] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.927302] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450769, 'name': RelocateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.965968] env[65503]: WARNING neutronclient.v2_0.client [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
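The oslo_concurrency.lockutils DEBUG lines threaded through this section ("refresh_cache-<uuid>", "compute_resources", "<uuid>-events") are emitted by lockutils' own wrapper around named locks, which logs acquire, wait time and hold time. A short sketch of the two public entry points behind those traces; the functions and data here are made up, only the lockutils calls are real API:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance):
        # The decorator's wrapper logs the "Acquiring lock ... by ...",
        # "acquired ... waited N.NNNs" and "released ... held N.NNNs"
        # lines seen for compute_resources in the log above.
        tracker.setdefault('instances', {})[instance] = 'tracked'

    def refresh_cache(instance_uuid, refresh):
        # Dynamic lock name, matching the "refresh_cache-<uuid>" locks used
        # while the network info cache is rebuilt.
        with lockutils.lock(f'refresh_cache-{instance_uuid}'):
            return refresh(instance_uuid)

The waited/held durations in those DEBUG lines (e.g. "waited 0.912s" on the resize_claim path, "held 2.253s" on instance_claim) come from the same wrapper and are the first place to look when chasing lock contention between concurrent builds.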
[ 1123.970022] env[65503]: WARNING openstack [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1123.970022] env[65503]: WARNING openstack [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1123.992600] env[65503]: DEBUG nova.network.neutron [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Successfully created port: 409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1124.114744] env[65503]: DEBUG nova.compute.manager [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1124.200680] env[65503]: WARNING openstack [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1124.201207] env[65503]: WARNING openstack [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1124.293599] env[65503]: DEBUG nova.compute.manager [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Stashing vm_state: active {{(pid=65503) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1124.429910] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450769, 'name': RelocateVM_Task, 'duration_secs': 0.430891} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.430131] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1124.430331] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870481', 'volume_id': '856eed9a-2608-4d0d-b7b3-52e61d723973', 'name': 'volume-856eed9a-2608-4d0d-b7b3-52e61d723973', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7b98ff40-1580-4175-adc5-66ca8977990a', 'attached_at': '', 'detached_at': '', 'volume_id': '856eed9a-2608-4d0d-b7b3-52e61d723973', 'serial': '856eed9a-2608-4d0d-b7b3-52e61d723973'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1124.434590] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6424826e-f3cf-4881-b386-2c62d029349d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.458428] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f5a289-f29b-470a-a3e8-6a446bb3e950 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.461648] env[65503]: WARNING neutronclient.v2_0.client [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
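The _attach_volume_vmdk entries above log the Cinder connection_info verbatim; of that dict, only a handful of fields matter for reconfiguring the VM with the new disk: the 'volume' object id backing the volume, the volume name/ID used to build the [datastore2] VMDK path, and the access mode. A small illustrative sketch that pulls those fields out of a dict shaped like the logged one; the helper name is invented and the values are copied from the log:

    def vmdk_attach_params(connection_info):
        """Extract the fields a consumer would typically use from a
        'vmdk'-type connection_info like the one logged above."""
        if connection_info['driver_volume_type'] != 'vmdk':
            raise ValueError('not a vmdk volume')
        data = connection_info['data']
        return {
            'backing_ref': data['volume'],      # e.g. 'vm-870481', per the log
            'volume_id': data['volume_id'],
            'vmdk_name': data['name'],          # 'volume-<uuid>' -> datastore path
            'read_only': data['access_mode'] == 'ro',
        }

    # Values taken from the log entry above:
    ci = {'driver_volume_type': 'vmdk',
          'data': {'volume': 'vm-870481',
                   'volume_id': '856eed9a-2608-4d0d-b7b3-52e61d723973',
                   'name': 'volume-856eed9a-2608-4d0d-b7b3-52e61d723973',
                   'access_mode': 'rw'}}
    print(vmdk_attach_params(ci))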
[ 1124.462371] env[65503]: WARNING openstack [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1124.462715] env[65503]: WARNING openstack [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1124.492386] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] volume-856eed9a-2608-4d0d-b7b3-52e61d723973/volume-856eed9a-2608-4d0d-b7b3-52e61d723973.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1124.495548] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a0367c9-f40f-416a-9cb5-36659695b550 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.517121] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for the task: (returnval){ [ 1124.517121] env[65503]: value = "task-4450770" [ 1124.517121] env[65503]: _type = "Task" [ 1124.517121] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.526593] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450770, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.596628] env[65503]: DEBUG nova.network.neutron [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Updated VIF entry in instance network info cache for port 816cbefe-fa43-42dd-8737-9679d9a453e8. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1124.596990] env[65503]: DEBUG nova.network.neutron [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Updating instance_info_cache with network_info: [{"id": "816cbefe-fa43-42dd-8737-9679d9a453e8", "address": "fa:16:3e:8b:26:f3", "network": {"id": "7b9e1f46-ccfc-47a4-a361-c45c8eab6f7a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1976781837-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b701e3d72ae1422a88e27cbb4ae8ea9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap816cbefe-fa", "ovs_interfaceid": "816cbefe-fa43-42dd-8737-9679d9a453e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1124.643926] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe224d5-b566-47b7-b49c-e2600cd0c3df {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.655227] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd1e2b7-c8d1-4e6d-9bc0-74e3090a70c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.691988] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a64724-142f-437f-bcfc-e36baa5409f3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.701838] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd682ac6-df97-440c-ad1b-fef4303e4795 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.719126] env[65503]: DEBUG nova.compute.provider_tree [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.821909] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.028863] env[65503]: DEBUG oslo_vmware.api [None 
req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450770, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.099827] env[65503]: DEBUG oslo_concurrency.lockutils [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] Releasing lock "refresh_cache-7b98ff40-1580-4175-adc5-66ca8977990a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1125.100182] env[65503]: DEBUG nova.compute.manager [req-3667eee7-98c1-4e76-a412-acd2f9ddef8c req-200db8f3-3aad-48a1-a86c-3024b63b6de4 service nova] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Received event network-vif-deleted-d65d8d7d-0e52-489e-854b-0090c2083876 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1125.127139] env[65503]: DEBUG nova.compute.manager [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1125.158176] env[65503]: DEBUG nova.virt.hardware [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1125.158176] env[65503]: DEBUG nova.virt.hardware [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1125.158176] env[65503]: DEBUG nova.virt.hardware [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1125.159089] env[65503]: DEBUG nova.virt.hardware [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1125.159396] env[65503]: DEBUG nova.virt.hardware [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 
tempest-ServerActionsTestOtherA-1976804656-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1125.159654] env[65503]: DEBUG nova.virt.hardware [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1125.159960] env[65503]: DEBUG nova.virt.hardware [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1125.160245] env[65503]: DEBUG nova.virt.hardware [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1125.160561] env[65503]: DEBUG nova.virt.hardware [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1125.161748] env[65503]: DEBUG nova.virt.hardware [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1125.161748] env[65503]: DEBUG nova.virt.hardware [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1125.161978] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a81b32-9705-4e6f-93c4-c690068f2b1b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.171287] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6126505c-549b-423b-a30f-aa0bf9a46ad9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.225349] env[65503]: DEBUG nova.scheduler.client.report [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.529344] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450770, 'name': ReconfigVM_Task, 'duration_secs': 0.622963} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.529679] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Reconfigured VM instance instance-00000069 to attach disk [datastore2] volume-856eed9a-2608-4d0d-b7b3-52e61d723973/volume-856eed9a-2608-4d0d-b7b3-52e61d723973.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1125.534646] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9740dfd-2595-4a9d-b592-6cad4447df1e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.552731] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for the task: (returnval){ [ 1125.552731] env[65503]: value = "task-4450771" [ 1125.552731] env[65503]: _type = "Task" [ 1125.552731] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.562146] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450771, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.696974] env[65503]: DEBUG nova.network.neutron [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Successfully updated port: 409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1125.733176] env[65503]: DEBUG oslo_concurrency.lockutils [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.871s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.734083] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.912s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.762178] env[65503]: DEBUG nova.compute.manager [req-8ab47022-462c-41ac-b532-3a6bf4519e6b req-9fb752d6-c374-4c0a-a05a-1730ed7c9055 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Received event network-vif-plugged-409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1125.762178] env[65503]: DEBUG oslo_concurrency.lockutils [req-8ab47022-462c-41ac-b532-3a6bf4519e6b req-9fb752d6-c374-4c0a-a05a-1730ed7c9055 service nova] Acquiring lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.762178] env[65503]: DEBUG oslo_concurrency.lockutils [req-8ab47022-462c-41ac-b532-3a6bf4519e6b req-9fb752d6-c374-4c0a-a05a-1730ed7c9055 service nova] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.762442] env[65503]: DEBUG oslo_concurrency.lockutils [req-8ab47022-462c-41ac-b532-3a6bf4519e6b req-9fb752d6-c374-4c0a-a05a-1730ed7c9055 service nova] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.762739] env[65503]: DEBUG nova.compute.manager [req-8ab47022-462c-41ac-b532-3a6bf4519e6b req-9fb752d6-c374-4c0a-a05a-1730ed7c9055 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] No waiting events found dispatching network-vif-plugged-409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1125.762822] env[65503]: WARNING nova.compute.manager [req-8ab47022-462c-41ac-b532-3a6bf4519e6b req-9fb752d6-c374-4c0a-a05a-1730ed7c9055 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Received unexpected event 
network-vif-plugged-409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2 for instance with vm_state building and task_state spawning. [ 1125.763958] env[65503]: INFO nova.scheduler.client.report [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Deleted allocations for instance dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f [ 1126.063734] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450771, 'name': ReconfigVM_Task, 'duration_secs': 0.150852} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.063979] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870481', 'volume_id': '856eed9a-2608-4d0d-b7b3-52e61d723973', 'name': 'volume-856eed9a-2608-4d0d-b7b3-52e61d723973', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7b98ff40-1580-4175-adc5-66ca8977990a', 'attached_at': '', 'detached_at': '', 'volume_id': '856eed9a-2608-4d0d-b7b3-52e61d723973', 'serial': '856eed9a-2608-4d0d-b7b3-52e61d723973'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1126.064655] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-affeea35-1594-4dd4-b3fe-f914437b706e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.073499] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for the task: (returnval){ [ 1126.073499] env[65503]: value = "task-4450772" [ 1126.073499] env[65503]: _type = "Task" [ 1126.073499] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.083875] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450772, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.206065] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "refresh_cache-afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.206065] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "refresh_cache-afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.206065] env[65503]: DEBUG nova.network.neutron [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1126.239652] env[65503]: INFO nova.compute.claims [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.271992] env[65503]: DEBUG oslo_concurrency.lockutils [None req-612f79df-fa93-4fa3-a242-f02ecd766ec8 tempest-ImagesTestJSON-2086755152 tempest-ImagesTestJSON-2086755152-project-member] Lock "dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.825s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.586889] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450772, 'name': Rename_Task, 'duration_secs': 0.141366} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.587188] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1126.587511] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3958c8b8-8309-4d66-83c3-d8b6e8c99251 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.595782] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for the task: (returnval){ [ 1126.595782] env[65503]: value = "task-4450773" [ 1126.595782] env[65503]: _type = "Task" [ 1126.595782] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.605309] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450773, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.709982] env[65503]: WARNING openstack [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1126.710599] env[65503]: WARNING openstack [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1126.747481] env[65503]: INFO nova.compute.resource_tracker [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating resource usage from migration a4e8ae1c-ef52-421f-b917-f181609c1865 [ 1126.750792] env[65503]: DEBUG nova.network.neutron [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1126.781756] env[65503]: WARNING openstack [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1126.782218] env[65503]: WARNING openstack [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1126.896134] env[65503]: WARNING neutronclient.v2_0.client [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
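Note: the repeated "Disabling service 'block-storage' / 'key-manager'" warnings above come from openstacksdk probing `valid_interfaces` in the `[cinder]` and `[barbican]` groups of a configuration where that option was never registered, so oslo.config raises `NoSuchOptError` (the exception named in the warning text). Below is a minimal sketch of that behaviour using a throwaway `ConfigOpts` object, not this deployment's real nova.conf; it assumes attribute access on the unregistered option surfaces `NoSuchOptError`, as the logged message suggests.

```python
# Minimal sketch of the oslo.config failure behind the repeated
# "no such option valid_interfaces in group [cinder]" warnings above.
# Throwaway ConfigOpts, not the deployment's real configuration.
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))

try:
    # The SDK probes [cinder]/valid_interfaces; nothing has registered
    # that option here, so the lookup fails the same way.
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print('probe failed:', exc)

# Once the option is registered, the same lookup succeeds.
conf.register_opts(
    [cfg.ListOpt('valid_interfaces', default=['internal', 'public'])],
    group='cinder')
print(conf.cinder.valid_interfaces)
```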
[ 1126.896891] env[65503]: WARNING openstack [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1126.897303] env[65503]: WARNING openstack [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1126.960102] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058e6b6d-6e5b-471c-916a-e8b84061955b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.970797] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f7cf90-ad33-486b-a2d8-9740a45ef39c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.009415] env[65503]: DEBUG nova.network.neutron [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Updating instance_info_cache with network_info: [{"id": "409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2", "address": "fa:16:3e:d3:ca:60", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap409dc91c-74", "ovs_interfaceid": "409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1127.014128] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f295dcf-d804-4dbb-a02a-c390c1570fb8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.021863] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e042046-daf7-411c-84a1-4c7d10d1266f {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.039388] env[65503]: DEBUG nova.compute.provider_tree [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.107076] env[65503]: DEBUG oslo_vmware.api [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450773, 'name': PowerOnVM_Task, 'duration_secs': 0.495429} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.107495] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1127.107712] env[65503]: INFO nova.compute.manager [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Took 4.22 seconds to spawn the instance on the hypervisor. [ 1127.107893] env[65503]: DEBUG nova.compute.manager [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1127.108894] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ab2218-51f5-482e-8d6e-1d200433a608 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.513354] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "refresh_cache-afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.513354] env[65503]: DEBUG nova.compute.manager [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Instance network_info: |[{"id": "409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2", "address": "fa:16:3e:d3:ca:60", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap409dc91c-74", "ovs_interfaceid": "409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1127.514259] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:ca:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a7ba8d0-0208-4af7-af44-2a5ad382f9be', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1127.522194] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1127.522856] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1127.522856] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-791ac96c-f59f-45d3-9f5b-2ef45f91500e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.542981] env[65503]: DEBUG nova.scheduler.client.report [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1127.547900] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1127.547900] env[65503]: value = "task-4450774" [ 1127.547900] env[65503]: _type = "Task" [ 1127.547900] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.558048] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450774, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.635351] env[65503]: INFO nova.compute.manager [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Took 12.13 seconds to build instance. [ 1127.810644] env[65503]: DEBUG nova.compute.manager [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Received event network-changed-409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1127.810644] env[65503]: DEBUG nova.compute.manager [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Refreshing instance network info cache due to event network-changed-409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1127.810940] env[65503]: DEBUG oslo_concurrency.lockutils [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] Acquiring lock "refresh_cache-afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.810940] env[65503]: DEBUG oslo_concurrency.lockutils [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] Acquired lock "refresh_cache-afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.811013] env[65503]: DEBUG nova.network.neutron [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Refreshing network info cache for port 409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1128.049729] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.316s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.049887] env[65503]: INFO nova.compute.manager [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Migrating [ 1128.081173] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450774, 'name': CreateVM_Task, 'duration_secs': 0.382443} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.081173] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1128.081173] env[65503]: WARNING neutronclient.v2_0.client [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1128.081173] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.081173] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.081173] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1128.081173] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c2b1fa7-6ff6-4caa-99da-d5406a4fbd93 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.090251] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1128.090251] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528dca39-4246-7d4a-b601-d3cd9438e6e7" [ 1128.090251] env[65503]: _type = "Task" [ 1128.090251] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.105786] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528dca39-4246-7d4a-b601-d3cd9438e6e7, 'name': SearchDatastore_Task, 'duration_secs': 0.011345} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.106210] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.106693] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1128.106944] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.107103] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.107685] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.107685] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-796d3485-7994-4824-9593-ae4cd09697a6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.119520] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.119520] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1128.119869] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f9a2a98-7d6b-47dd-b8ee-7913ec60cbcb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.126457] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1128.126457] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5273e364-ad7d-25d1-b084-76b11cccee87" [ 1128.126457] env[65503]: _type = "Task" [ 1128.126457] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.139506] env[65503]: DEBUG oslo_concurrency.lockutils [None req-ac112e5e-f59a-4441-af9a-12e427ea5456 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "7b98ff40-1580-4175-adc5-66ca8977990a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.648s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.139867] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5273e364-ad7d-25d1-b084-76b11cccee87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.255249] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "bc0dad37-fc1d-4edc-8680-dba294dd724e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.256016] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "bc0dad37-fc1d-4edc-8680-dba294dd724e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.313329] env[65503]: WARNING neutronclient.v2_0.client [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
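Note: the entries above show the lockutils pattern that runs through this log: "Acquiring lock", then "acquired ... waited Ns", and later "released ... held Ns" (here around the image-cache datastore search, and the instance build lock held 13.648s). A minimal sketch of that pattern with oslo.concurrency's lock context manager follows; the lock name is copied from the log for flavour, the guarded sleep and the helper name `refresh_image_cache` are illustrative only, and this is not Nova's code.

```python
# Sketch of the acquire/waited/held accounting visible in the
# oslo_concurrency.lockutils DEBUG lines above.
import time

from oslo_concurrency import lockutils

LOCK_NAME = '[datastore2] devstack-image-cache_base'  # example name from the log


def refresh_image_cache():
    requested_at = time.monotonic()
    with lockutils.lock(LOCK_NAME):              # "Acquiring lock ..."
        waited = time.monotonic() - requested_at
        print(f'Lock "{LOCK_NAME}" acquired :: waited {waited:.3f}s')
        held_from = time.monotonic()
        time.sleep(0.1)                          # stand-in for the datastore search
        held = time.monotonic() - held_from
    print(f'Lock "{LOCK_NAME}" released :: held {held:.3f}s')


if __name__ == '__main__':
    refresh_image_cache()
```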
[ 1128.315172] env[65503]: WARNING openstack [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1128.315172] env[65503]: WARNING openstack [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1128.574822] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.575071] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.575322] env[65503]: DEBUG nova.network.neutron [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1128.643150] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5273e364-ad7d-25d1-b084-76b11cccee87, 'name': SearchDatastore_Task, 'duration_secs': 0.013759} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.649647] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e9d539b-ff45-4e75-86f0-7f6a8b990b7e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.658179] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1128.658179] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a91430-b925-3301-9cd8-1d565cb169d2" [ 1128.658179] env[65503]: _type = "Task" [ 1128.658179] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.668075] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a91430-b925-3301-9cd8-1d565cb169d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.700958] env[65503]: DEBUG nova.compute.manager [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Received event network-changed-816cbefe-fa43-42dd-8737-9679d9a453e8 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1128.701232] env[65503]: DEBUG nova.compute.manager [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Refreshing instance network info cache due to event network-changed-816cbefe-fa43-42dd-8737-9679d9a453e8. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1128.701450] env[65503]: DEBUG oslo_concurrency.lockutils [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] Acquiring lock "refresh_cache-7b98ff40-1580-4175-adc5-66ca8977990a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.701750] env[65503]: DEBUG oslo_concurrency.lockutils [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] Acquired lock "refresh_cache-7b98ff40-1580-4175-adc5-66ca8977990a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.701951] env[65503]: DEBUG nova.network.neutron [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Refreshing network info cache for port 816cbefe-fa43-42dd-8737-9679d9a453e8 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1128.707492] env[65503]: WARNING openstack [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1128.707846] env[65503]: WARNING openstack [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1128.760771] env[65503]: DEBUG nova.compute.manager [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1128.785274] env[65503]: WARNING neutronclient.v2_0.client [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1128.785969] env[65503]: WARNING openstack [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1128.786333] env[65503]: WARNING openstack [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1128.874495] env[65503]: DEBUG nova.network.neutron [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Updated VIF entry in instance network info cache for port 409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1128.874922] env[65503]: DEBUG nova.network.neutron [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Updating instance_info_cache with network_info: [{"id": "409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2", "address": "fa:16:3e:d3:ca:60", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap409dc91c-74", "ovs_interfaceid": "409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1129.084022] env[65503]: WARNING neutronclient.v2_0.client [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
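Note: the network-changed handler above refreshes the port cache and writes back the network_info structure shown in the preceding entry. The sketch below just pulls the commonly needed fields (port id, device name, MAC, MTU, fixed IPs) out of a VIF dict shaped like the logged one; the dict literal is a trimmed copy of the logged data, nothing is fetched from Neutron.

```python
# Extract a few fields from a Nova network_info VIF entry shaped like the
# one logged above for port 409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2.
vif = {
    "id": "409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2",
    "address": "fa:16:3e:d3:ca:60",
    "network": {
        "id": "c1fee82f-9c02-4c09-800c-e936d95c7c64",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.10", "type": "fixed",
                     "version": 4, "floating_ips": []}],
        }],
        "meta": {"mtu": 8950, "physical_network": "default"},
    },
    "type": "ovs",
    "devname": "tap409dc91c-74",
    "active": True,
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]

print(f'port {vif["id"]} ({vif["devname"]})')
print(f'  mac={vif["address"]} mtu={vif["network"]["meta"]["mtu"]}')
print(f'  fixed ips: {", ".join(fixed_ips)}')
```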
[ 1129.084022] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1129.084022] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1129.174813] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a91430-b925-3301-9cd8-1d565cb169d2, 'name': SearchDatastore_Task, 'duration_secs': 0.01883} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.175171] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.175476] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad/afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1129.175841] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c9d3547-2e19-4078-82d1-fa9d5dc46b3d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.186440] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1129.186440] env[65503]: value = "task-4450775" [ 1129.186440] env[65503]: _type = "Task" [ 1129.186440] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.204039] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450775, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.206027] env[65503]: WARNING neutronclient.v2_0.client [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1129.206533] env[65503]: WARNING openstack [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1129.206906] env[65503]: WARNING openstack [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1129.235034] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1129.235484] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1129.296933] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.297308] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.298993] env[65503]: INFO nova.compute.claims [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1129.309288] env[65503]: WARNING neutronclient.v2_0.client [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding 
code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1129.310040] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1129.310448] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1129.360937] env[65503]: WARNING openstack [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1129.361415] env[65503]: WARNING openstack [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1129.377800] env[65503]: DEBUG oslo_concurrency.lockutils [req-0afa0ec4-4ffc-4793-b970-efe5692bfd9f req-6146f431-227c-4ea9-ba45-402b46fe0821 service nova] Releasing lock "refresh_cache-afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.438218] env[65503]: DEBUG nova.network.neutron [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance_info_cache with network_info: [{"id": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "address": "fa:16:3e:57:76:3d", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapf8cdf0ee-e8", "ovs_interfaceid": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1129.493631] env[65503]: WARNING neutronclient.v2_0.client [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1129.494435] env[65503]: WARNING openstack [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1129.494774] env[65503]: WARNING openstack [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1129.616799] env[65503]: DEBUG nova.network.neutron [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Updated VIF entry in instance network info cache for port 816cbefe-fa43-42dd-8737-9679d9a453e8. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1129.617368] env[65503]: DEBUG nova.network.neutron [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Updating instance_info_cache with network_info: [{"id": "816cbefe-fa43-42dd-8737-9679d9a453e8", "address": "fa:16:3e:8b:26:f3", "network": {"id": "7b9e1f46-ccfc-47a4-a361-c45c8eab6f7a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1976781837-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b701e3d72ae1422a88e27cbb4ae8ea9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap816cbefe-fa", "ovs_interfaceid": "816cbefe-fa43-42dd-8737-9679d9a453e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1129.701638] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 
tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450775, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.941313] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.119819] env[65503]: DEBUG oslo_concurrency.lockutils [req-8bc26062-225b-4e1e-bed8-a6206d85273c req-8b6f33d1-bfa8-433c-b68e-219cfa6be0c6 service nova] Releasing lock "refresh_cache-7b98ff40-1580-4175-adc5-66ca8977990a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.198446] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450775, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.622057} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.198726] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad/afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1130.198952] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1130.199229] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cccad45e-1401-4472-b09f-6cf990512723 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.207154] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1130.207154] env[65503]: value = "task-4450776" [ 1130.207154] env[65503]: _type = "Task" [ 1130.207154] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.218487] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450776, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.487579] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf12c33e-bc24-4c01-827c-e99ca4e5b2b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.497666] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec3aae8-1bcb-48f3-ac06-3d055f59af9f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.530832] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96522a8d-ea66-4df7-9f22-0f106bb9bf49 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.541222] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2dbc96-c72d-4f3d-93bd-2cc556b774e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.557466] env[65503]: DEBUG nova.compute.provider_tree [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1130.718375] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450776, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114641} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.718664] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1130.719551] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d50f4f-c2d8-40dc-adc0-894cca964d27 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.745110] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad/afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1130.745492] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b55b6b8f-ee9e-46b2-accd-75a629c6de8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.767194] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1130.767194] env[65503]: value = "task-4450777" [ 1130.767194] env[65503]: _type = "Task" [ 1130.767194] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.776655] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450777, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.062919] env[65503]: DEBUG nova.scheduler.client.report [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1131.278375] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450777, 'name': ReconfigVM_Task, 'duration_secs': 0.305802} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.278705] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfigured VM instance instance-0000006a to attach disk [datastore2] afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad/afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1131.279394] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25d4db1a-a8a6-409a-9aa0-4452d7da0c0f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.287574] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1131.287574] env[65503]: value = "task-4450778" [ 1131.287574] env[65503]: _type = "Task" [ 1131.287574] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.297635] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450778, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.460895] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddc17d6-b77e-430d-a6cb-036e60a127ed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.486385] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance '2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a' progress to 0 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1131.566760] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.269s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.568878] env[65503]: DEBUG nova.compute.manager [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1131.799697] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450778, 'name': Rename_Task, 'duration_secs': 0.163709} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.799697] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1131.799876] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f7c19054-efc0-4cdc-a745-96ac5cfbdd3e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.808740] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1131.808740] env[65503]: value = "task-4450779" [ 1131.808740] env[65503]: _type = "Task" [ 1131.808740] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.817851] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450779, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.993104] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1131.994139] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3461bea1-e7b8-4046-9dfd-e853f91695ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.003495] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1132.003495] env[65503]: value = "task-4450780" [ 1132.003495] env[65503]: _type = "Task" [ 1132.003495] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.012935] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450780, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.072600] env[65503]: DEBUG nova.compute.utils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1132.074169] env[65503]: DEBUG nova.compute.manager [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1132.074652] env[65503]: DEBUG nova.network.neutron [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1132.074752] env[65503]: WARNING neutronclient.v2_0.client [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1132.074992] env[65503]: WARNING neutronclient.v2_0.client [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1132.075674] env[65503]: WARNING openstack [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1132.076231] env[65503]: WARNING openstack [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1132.128566] env[65503]: DEBUG nova.policy [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07ffde40fb9f4aee8cbe3e13b3f6fd6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da471ceaf0f348cd87e42d3c03fdfeb8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1132.321514] env[65503]: DEBUG oslo_vmware.api [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450779, 'name': PowerOnVM_Task, 'duration_secs': 0.493415} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.321789] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1132.322095] env[65503]: INFO nova.compute.manager [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Took 7.19 seconds to spawn the instance on the hypervisor. 
[ 1132.322298] env[65503]: DEBUG nova.compute.manager [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1132.323162] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05f9e80-622d-46ad-99fe-d53772245bbc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.459108] env[65503]: DEBUG nova.network.neutron [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Successfully created port: f222ff54-daca-43ba-8e76-24669d7878e6 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1132.521498] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450780, 'name': PowerOffVM_Task, 'duration_secs': 0.298889} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.523350] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1132.523350] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance '2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a' progress to 17 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1132.585442] env[65503]: DEBUG nova.compute.manager [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1132.842371] env[65503]: INFO nova.compute.manager [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Took 12.50 seconds to build instance. 
[ 1133.031669] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1133.031962] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1133.032966] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1133.032966] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1133.032966] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1133.032966] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1133.032966] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1133.032966] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1133.033774] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:509}} [ 1133.033774] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1133.033774] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1133.038598] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6018750-9ab7-45ae-a8a4-2bbe6a21c6bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.056277] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1133.056277] env[65503]: value = "task-4450781" [ 1133.056277] env[65503]: _type = "Task" [ 1133.056277] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.065595] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450781, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.324472] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.324764] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.324897] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.325033] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.325206] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1133.325410] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.344419] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a76ee8f3-a5fc-499f-9758-793fd75333d5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.007s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.568455] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450781, 'name': ReconfigVM_Task, 'duration_secs': 0.393968} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.569101] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance '2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a' progress to 33 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1133.598078] env[65503]: DEBUG nova.compute.manager [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1133.622200] env[65503]: DEBUG nova.virt.hardware [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1133.622480] env[65503]: DEBUG nova.virt.hardware [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1133.622648] env[65503]: DEBUG nova.virt.hardware [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1133.622837] env[65503]: DEBUG nova.virt.hardware [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1133.622977] env[65503]: DEBUG nova.virt.hardware [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1133.623232] env[65503]: DEBUG nova.virt.hardware [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1133.623395] env[65503]: DEBUG nova.virt.hardware [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1133.623555] env[65503]: DEBUG nova.virt.hardware [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1133.623720] env[65503]: DEBUG nova.virt.hardware [None 
req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1133.623886] env[65503]: DEBUG nova.virt.hardware [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1133.624052] env[65503]: DEBUG nova.virt.hardware [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1133.624964] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297e1190-404c-49b3-88d9-825b8442ac41 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.635718] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7427f84e-4dd3-420a-a577-f03d98013025 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.724224] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "bc700871-233c-4aa0-867e-4f166b6f54d1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.724458] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.828676] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.828807] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.828896] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.829066] env[65503]: DEBUG nova.compute.resource_tracker [None 
req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1133.832102] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47be342-313d-4d24-a717-bb60b483a4c0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.840868] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d99357-de76-457a-8504-f05303359eff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.858170] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5f3bcd-14c7-4efa-8b60-6289a5792e76 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.868023] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e653b031-2f8f-431e-98b2-e2ef757a0701 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.900417] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178973MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1133.900592] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.900877] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.022327] env[65503]: DEBUG nova.network.neutron [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Successfully updated port: f222ff54-daca-43ba-8e76-24669d7878e6 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1134.076229] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1134.076229] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1134.076229] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1134.076229] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1134.076229] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1134.076474] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1134.076474] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1134.076612] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1134.076800] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1134.076932] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1134.077469] env[65503]: DEBUG nova.virt.hardware [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 
tempest-ServerActionsTestOtherB-577581098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1134.085545] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1134.085545] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-218a8c1e-5175-4dd1-869a-08e7138971da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.106138] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1134.106138] env[65503]: value = "task-4450782" [ 1134.106138] env[65503]: _type = "Task" [ 1134.106138] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.118056] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450782, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.231035] env[65503]: DEBUG nova.compute.utils [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1134.281085] env[65503]: DEBUG nova.compute.manager [req-cd6ff749-5ba0-4347-b286-6b330e3466e3 req-31ccc53a-af0e-4b5f-96c1-ed49c15a3e76 service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Received event network-vif-plugged-f222ff54-daca-43ba-8e76-24669d7878e6 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1134.281285] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd6ff749-5ba0-4347-b286-6b330e3466e3 req-31ccc53a-af0e-4b5f-96c1-ed49c15a3e76 service nova] Acquiring lock "bc0dad37-fc1d-4edc-8680-dba294dd724e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.281548] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd6ff749-5ba0-4347-b286-6b330e3466e3 req-31ccc53a-af0e-4b5f-96c1-ed49c15a3e76 service nova] Lock "bc0dad37-fc1d-4edc-8680-dba294dd724e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.281735] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd6ff749-5ba0-4347-b286-6b330e3466e3 req-31ccc53a-af0e-4b5f-96c1-ed49c15a3e76 service nova] Lock "bc0dad37-fc1d-4edc-8680-dba294dd724e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.281902] env[65503]: DEBUG nova.compute.manager [req-cd6ff749-5ba0-4347-b286-6b330e3466e3 req-31ccc53a-af0e-4b5f-96c1-ed49c15a3e76 service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] No waiting events found dispatching network-vif-plugged-f222ff54-daca-43ba-8e76-24669d7878e6 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1134.282257] env[65503]: WARNING nova.compute.manager [req-cd6ff749-5ba0-4347-b286-6b330e3466e3 req-31ccc53a-af0e-4b5f-96c1-ed49c15a3e76 service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Received unexpected event network-vif-plugged-f222ff54-daca-43ba-8e76-24669d7878e6 for instance with vm_state building and task_state spawning. [ 1134.301350] env[65503]: DEBUG nova.compute.manager [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Received event network-changed-409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1134.301543] env[65503]: DEBUG nova.compute.manager [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Refreshing instance network info cache due to event network-changed-409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1134.301751] env[65503]: DEBUG oslo_concurrency.lockutils [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] Acquiring lock "refresh_cache-afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.301886] env[65503]: DEBUG oslo_concurrency.lockutils [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] Acquired lock "refresh_cache-afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.302050] env[65503]: DEBUG nova.network.neutron [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Refreshing network info cache for port 409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1134.525615] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "refresh_cache-bc0dad37-fc1d-4edc-8680-dba294dd724e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.525942] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired lock "refresh_cache-bc0dad37-fc1d-4edc-8680-dba294dd724e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.525942] env[65503]: DEBUG nova.network.neutron [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 
bc0dad37-fc1d-4edc-8680-dba294dd724e] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1134.617214] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450782, 'name': ReconfigVM_Task, 'duration_secs': 0.469032} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.617214] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1134.617963] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8eba95-cb33-4635-86a9-a6e9c1052027 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.644952] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a/2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1134.645230] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bdcb9ca-f5ab-46b5-a7a6-55dd9553c106 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.665474] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1134.665474] env[65503]: value = "task-4450783" [ 1134.665474] env[65503]: _type = "Task" [ 1134.665474] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.676235] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450783, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.734446] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.806116] env[65503]: WARNING neutronclient.v2_0.client [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1134.806925] env[65503]: WARNING openstack [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1134.807811] env[65503]: WARNING openstack [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1134.897764] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.897996] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.914153] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Applying migration context for instance 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a as it has an incoming, in-progress migration a4e8ae1c-ef52-421f-b917-f181609c1865. 
Migration status is migrating {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 1134.915815] env[65503]: INFO nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating resource usage from migration a4e8ae1c-ef52-421f-b917-f181609c1865 [ 1134.926277] env[65503]: WARNING openstack [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1134.926690] env[65503]: WARNING openstack [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1134.937823] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance e37758cc-7287-4271-ad47-d711201d0add actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1134.938044] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1134.938217] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance d3ca90c9-3dfa-47a5-b48b-67a45ea26021 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1134.938382] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance b7f55645-f152-4fc9-a962-393f9a9b9c55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1134.938537] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance b00a98a4-4865-4a02-a353-3d1da9ef0e51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1134.938731] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance bc700871-233c-4aa0-867e-4f166b6f54d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1134.938892] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 7b98ff40-1580-4175-adc5-66ca8977990a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1134.939033] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1134.939151] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Migration a4e8ae1c-ef52-421f-b917-f181609c1865 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1745}} [ 1134.939327] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1134.939446] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance bc0dad37-fc1d-4edc-8680-dba294dd724e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1134.995641] env[65503]: WARNING neutronclient.v2_0.client [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1134.996421] env[65503]: WARNING openstack [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1134.997441] env[65503]: WARNING openstack [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1135.030113] env[65503]: WARNING openstack [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1135.030581] env[65503]: WARNING openstack [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1135.070767] env[65503]: DEBUG nova.network.neutron [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1135.086016] env[65503]: DEBUG nova.network.neutron [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Updated VIF entry in instance network info cache for port 409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1135.086387] env[65503]: DEBUG nova.network.neutron [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Updating instance_info_cache with network_info: [{"id": "409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2", "address": "fa:16:3e:d3:ca:60", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap409dc91c-74", "ovs_interfaceid": "409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1135.089412] env[65503]: WARNING openstack [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1135.089767] env[65503]: WARNING openstack [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1135.139026] env[65503]: WARNING neutronclient.v2_0.client [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1135.139805] env[65503]: WARNING openstack [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1135.140320] env[65503]: WARNING openstack [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1135.178689] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450783, 'name': ReconfigVM_Task, 'duration_secs': 0.337057} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.179050] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a/2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1135.179369] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance '2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a' progress to 50 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1135.230618] env[65503]: DEBUG nova.network.neutron [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Updating instance_info_cache with network_info: [{"id": "f222ff54-daca-43ba-8e76-24669d7878e6", "address": "fa:16:3e:b8:3b:e2", "network": {"id": "3686d3c7-ea9f-4310-b1ef-785e7f12fc72", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1730162076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "da471ceaf0f348cd87e42d3c03fdfeb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf222ff54-da", "ovs_interfaceid": "f222ff54-daca-43ba-8e76-24669d7878e6", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1135.402992] env[65503]: DEBUG nova.compute.manager [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1135.442930] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 1135.443178] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1135.443362] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2688MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=11 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '10', 'num_vm_active': '8', 'num_task_None': '8', 'num_os_type_None': '10', 'num_proj_d3ade9ce0dc44449bb7a3bf0c624e366': '3', 'io_workload': '2', 'num_vm_rescued': '1', 'num_proj_592efb180976432cbcecb9ad421e1bd1': '1', 'num_proj_521d40776571452e85178972f97c8622': '2', 'num_task_resize_migrating': '1', 'num_proj_3658921b747e4d78a2046b838cb36d26': '1', 'num_proj_34e8cd66745a40d2acebbce98050ee5d': '1', 'num_proj_b701e3d72ae1422a88e27cbb4ae8ea9b': '1', 'num_vm_building': '1', 'num_task_spawning': '1', 'num_proj_da471ceaf0f348cd87e42d3c03fdfeb8': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1135.597819] env[65503]: DEBUG oslo_concurrency.lockutils [req-ba70da99-7f95-4d28-bb49-8d3507874295 req-dddde571-9d6b-4f80-b74c-3b031e5e71e4 service nova] Releasing lock "refresh_cache-afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.606539] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523ae839-cdde-45e0-9cb6-cdd2c77c80d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.615062] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1613cf3-60aa-46cb-ae78-581854ca45ae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.646366] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02121c1f-5fb8-4bb8-a5b7-3ba541568a8b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.655352] 
env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c740e5-0864-40d8-8c87-e9f76b320d8e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.669803] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.686514] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef91db4e-226c-413d-b467-162df027cdbf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.712327] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b25082-104d-43ff-8ada-3055dab08fa1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.735524] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Releasing lock "refresh_cache-bc0dad37-fc1d-4edc-8680-dba294dd724e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.735925] env[65503]: DEBUG nova.compute.manager [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Instance network_info: |[{"id": "f222ff54-daca-43ba-8e76-24669d7878e6", "address": "fa:16:3e:b8:3b:e2", "network": {"id": "3686d3c7-ea9f-4310-b1ef-785e7f12fc72", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1730162076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "da471ceaf0f348cd87e42d3c03fdfeb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf222ff54-da", "ovs_interfaceid": "f222ff54-daca-43ba-8e76-24669d7878e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1135.736317] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance '2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a' progress to 67 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1135.741129] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 
tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:3b:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11da2092-76f7-447e-babb-8fc14ad39a71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f222ff54-daca-43ba-8e76-24669d7878e6', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1135.748585] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Creating folder: Project (da471ceaf0f348cd87e42d3c03fdfeb8). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1135.748889] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba20ffc2-6fa8-4747-ac60-f19b670b76d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.761680] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Created folder: Project (da471ceaf0f348cd87e42d3c03fdfeb8) in parent group-v870190. [ 1135.761886] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Creating folder: Instances. Parent ref: group-v870489. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1135.762169] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d7216f2-879f-49a5-b990-827e7749de1f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.773841] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Created folder: Instances in parent group-v870489. [ 1135.774168] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1135.774394] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1135.774761] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f478921-bb1a-4d1d-b3b0-0cfec8e5a0cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.796806] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1135.796806] env[65503]: value = "task-4450786" [ 1135.796806] env[65503]: _type = "Task" [ 1135.796806] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.806308] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450786, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.814411] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "bc700871-233c-4aa0-867e-4f166b6f54d1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.814720] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.814954] env[65503]: INFO nova.compute.manager [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Attaching volume 6c93ec2a-f1f5-4070-bb08-2e54aed95b58 to /dev/sdb [ 1135.852925] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6a4d6a-d586-4257-b40b-ef4ab8ef6618 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.861657] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b36547-42aa-4058-b4d0-ee575bfd0259 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.876984] env[65503]: DEBUG nova.virt.block_device [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Updating existing volume attachment record: c1d4da39-6ee4-446a-8f64-a78710525421 {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1135.926858] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.174057] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1136.312601] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450786, 'name': CreateVM_Task, 'duration_secs': 0.350786} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.314524] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1136.315520] env[65503]: DEBUG nova.compute.manager [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Received event network-changed-f222ff54-daca-43ba-8e76-24669d7878e6 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1136.315728] env[65503]: DEBUG nova.compute.manager [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Refreshing instance network info cache due to event network-changed-f222ff54-daca-43ba-8e76-24669d7878e6. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1136.315980] env[65503]: DEBUG oslo_concurrency.lockutils [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] Acquiring lock "refresh_cache-bc0dad37-fc1d-4edc-8680-dba294dd724e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.316285] env[65503]: DEBUG oslo_concurrency.lockutils [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] Acquired lock "refresh_cache-bc0dad37-fc1d-4edc-8680-dba294dd724e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.316340] env[65503]: DEBUG nova.network.neutron [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Refreshing network info cache for port f222ff54-daca-43ba-8e76-24669d7878e6 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1136.319080] env[65503]: WARNING neutronclient.v2_0.client [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1136.319551] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.319775] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.320207] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1136.321083] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a732bfa3-9129-4bce-b964-948b763d081d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.327684] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1136.327684] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52522a2f-4e90-8d1f-c1ff-d262c2ab97de" [ 1136.327684] env[65503]: _type = "Task" [ 1136.327684] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.340335] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52522a2f-4e90-8d1f-c1ff-d262c2ab97de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.678761] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1136.679164] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.778s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.679295] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.753s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.680975] env[65503]: INFO nova.compute.claims [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1136.684087] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.684814] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Cleaning up deleted instances with incomplete migration {{(pid=65503) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11954}} [ 1136.819976] env[65503]: WARNING neutronclient.v2_0.client [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1136.820803] env[65503]: WARNING openstack [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1136.821493] env[65503]: WARNING openstack [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1136.840023] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52522a2f-4e90-8d1f-c1ff-d262c2ab97de, 'name': SearchDatastore_Task, 'duration_secs': 0.029641} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.840134] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.840366] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1136.841846] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.841846] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.841846] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.841846] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9351b37f-b4d9-48ae-a1c6-820cbe94f3af {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.857326] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.857414] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1136.860261] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-271efad9-eb7a-4d28-9c44-ccd34903af40 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.866861] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1136.866861] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a6aab7-dac3-55ac-f3fa-9667c2cf378e" [ 1136.866861] env[65503]: _type = "Task" [ 1136.866861] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.876482] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a6aab7-dac3-55ac-f3fa-9667c2cf378e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.920144] env[65503]: WARNING openstack [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1136.920543] env[65503]: WARNING openstack [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1136.966601] env[65503]: WARNING neutronclient.v2_0.client [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1136.967310] env[65503]: WARNING openstack [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1136.967649] env[65503]: WARNING openstack [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1137.044753] env[65503]: DEBUG nova.network.neutron [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Updated VIF entry in instance network info cache for port f222ff54-daca-43ba-8e76-24669d7878e6. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1137.045149] env[65503]: DEBUG nova.network.neutron [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Updating instance_info_cache with network_info: [{"id": "f222ff54-daca-43ba-8e76-24669d7878e6", "address": "fa:16:3e:b8:3b:e2", "network": {"id": "3686d3c7-ea9f-4310-b1ef-785e7f12fc72", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1730162076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "da471ceaf0f348cd87e42d3c03fdfeb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf222ff54-da", "ovs_interfaceid": "f222ff54-daca-43ba-8e76-24669d7878e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1137.355126] env[65503]: WARNING neutronclient.v2_0.client [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1137.378061] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a6aab7-dac3-55ac-f3fa-9667c2cf378e, 'name': SearchDatastore_Task, 'duration_secs': 0.011267} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.378938] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d0205d6-6b25-42af-8c6f-a55c8da996be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.384592] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1137.384592] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f8e9f8-37ae-7fc0-9218-5dbc14921d9d" [ 1137.384592] env[65503]: _type = "Task" [ 1137.384592] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.389139] env[65503]: DEBUG nova.network.neutron [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Port f8cdf0ee-e818-41cd-b8aa-b485ea737879 binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 1137.393230] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f8e9f8-37ae-7fc0-9218-5dbc14921d9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.547588] env[65503]: DEBUG oslo_concurrency.lockutils [req-8f979dcd-665d-4f4d-b278-356655ee3ace req-6f68ea49-08fb-40bf-a78d-7b783ac075ec service nova] Releasing lock "refresh_cache-bc0dad37-fc1d-4edc-8680-dba294dd724e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.903255] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f8e9f8-37ae-7fc0-9218-5dbc14921d9d, 'name': SearchDatastore_Task, 'duration_secs': 0.009696} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.906594] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.906877] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] bc0dad37-fc1d-4edc-8680-dba294dd724e/bc0dad37-fc1d-4edc-8680-dba294dd724e.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1137.907440] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a37392b8-9494-423c-a384-5f55934bc776 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.916326] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1137.916326] env[65503]: value = "task-4450788" [ 1137.916326] env[65503]: _type = "Task" [ 1137.916326] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.927451] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450788, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.939575] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc71f4f-fa7c-4a12-92ed-d0e0f8a3fcf5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.947283] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9afb39-6a0d-4dd5-9d93-f13e57f6a127 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.981472] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5062c757-9039-4354-8801-ccf5346535fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.990138] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841313df-1d79-4a47-93d8-882277ab1978 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.004970] env[65503]: DEBUG nova.compute.provider_tree [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.419274] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.419469] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.419523] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.431564] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450788, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462097} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.431818] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] bc0dad37-fc1d-4edc-8680-dba294dd724e/bc0dad37-fc1d-4edc-8680-dba294dd724e.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1138.432054] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1138.432401] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e9684f4-9fe1-48a3-81c7-7b546b5e3e34 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.441784] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1138.441784] env[65503]: value = "task-4450790" [ 1138.441784] env[65503]: _type = "Task" [ 1138.441784] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.452592] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450790, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.508435] env[65503]: DEBUG nova.scheduler.client.report [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1138.952253] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450790, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066367} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.952492] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1138.953620] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7d991c-953d-4017-af0f-f0d0e356482a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.977800] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] bc0dad37-fc1d-4edc-8680-dba294dd724e/bc0dad37-fc1d-4edc-8680-dba294dd724e.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1138.978084] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1722e047-fa24-4fc3-8f3b-878211488026 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.998151] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1138.998151] env[65503]: value = "task-4450791" [ 1138.998151] env[65503]: _type = "Task" [ 1138.998151] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.007212] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450791, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.014086] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.014626] env[65503]: DEBUG nova.compute.manager [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1139.182144] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.182444] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.425664] env[65503]: WARNING neutronclient.v2_0.client [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1139.458152] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.458337] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.458509] env[65503]: DEBUG nova.network.neutron [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1139.509984] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450791, 'name': ReconfigVM_Task, 'duration_secs': 0.282085} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.510279] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Reconfigured VM instance instance-0000006b to attach disk [datastore2] bc0dad37-fc1d-4edc-8680-dba294dd724e/bc0dad37-fc1d-4edc-8680-dba294dd724e.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1139.510935] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c998eb57-6dfc-4eff-b9b0-5b67ff01ee78 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.519664] env[65503]: DEBUG nova.compute.utils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1139.521395] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1139.521395] env[65503]: value = "task-4450792" [ 1139.521395] env[65503]: _type = "Task" [ 1139.521395] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.521662] env[65503]: DEBUG nova.compute.manager [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1139.521846] env[65503]: DEBUG nova.network.neutron [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1139.522170] env[65503]: WARNING neutronclient.v2_0.client [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1139.522473] env[65503]: WARNING neutronclient.v2_0.client [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1139.523058] env[65503]: WARNING openstack [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1139.523441] env[65503]: WARNING openstack [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1139.541236] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450792, 'name': Rename_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.586186] env[65503]: DEBUG nova.policy [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af1890ab617d443e985db57a798cac5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93906c603f7a4b18a34fc4b42fb6d6c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1139.690245] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.690439] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.690633] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.691066] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Cleaning up deleted instances {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11916}} [ 1139.958467] env[65503]: DEBUG nova.network.neutron [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Successfully created port: 038e2362-a15e-4f40-bbd0-5289c014118b {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1139.961385] 
env[65503]: WARNING neutronclient.v2_0.client [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1139.961737] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1139.962206] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1140.032126] env[65503]: DEBUG nova.compute.manager [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1140.049820] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450792, 'name': Rename_Task, 'duration_secs': 0.19255} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.050106] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1140.050334] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2a56615-95c1-48f4-8cb6-837e536f37bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.059259] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1140.059259] env[65503]: value = "task-4450793" [ 1140.059259] env[65503]: _type = "Task" [ 1140.059259] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.069041] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450793, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.079673] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1140.080060] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1140.143094] env[65503]: WARNING neutronclient.v2_0.client [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1140.143794] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1140.144146] env[65503]: WARNING openstack [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1140.216463] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] There are 60 instances to clean {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11925}} [ 1140.216463] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: dacb9e1b-26d3-43a9-9f10-95f2e3ef8b5f] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1140.315422] env[65503]: DEBUG nova.network.neutron [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance_info_cache with network_info: [{"id": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "address": "fa:16:3e:57:76:3d", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cdf0ee-e8", "ovs_interfaceid": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1140.426114] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Volume attach. Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1140.426424] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870492', 'volume_id': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'name': 'volume-6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bc700871-233c-4aa0-867e-4f166b6f54d1', 'attached_at': '', 'detached_at': '', 'volume_id': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'serial': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1140.427481] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7cd933-527c-45e2-bfdd-444353fced77 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.445014] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df457ea-fdeb-490b-9690-b3972ec23b6a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.472344] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] volume-6c93ec2a-f1f5-4070-bb08-2e54aed95b58/volume-6c93ec2a-f1f5-4070-bb08-2e54aed95b58.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1140.472597] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94b5c51a-6600-4c91-b53c-ab780c1b8704 {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.492455] env[65503]: DEBUG oslo_vmware.api [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1140.492455] env[65503]: value = "task-4450794" [ 1140.492455] env[65503]: _type = "Task" [ 1140.492455] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.501207] env[65503]: DEBUG oslo_vmware.api [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450794, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.570101] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450793, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.719317] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: eb18fc5c-168b-4442-af66-15e255ecc535] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1140.818599] env[65503]: DEBUG oslo_concurrency.lockutils [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.003169] env[65503]: DEBUG oslo_vmware.api [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450794, 'name': ReconfigVM_Task, 'duration_secs': 0.373571} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.003498] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Reconfigured VM instance instance-00000067 to attach disk [datastore2] volume-6c93ec2a-f1f5-4070-bb08-2e54aed95b58/volume-6c93ec2a-f1f5-4070-bb08-2e54aed95b58.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1141.008137] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b031e8b3-2044-4bd8-92b6-773068a8dffb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.024424] env[65503]: DEBUG oslo_vmware.api [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1141.024424] env[65503]: value = "task-4450795" [ 1141.024424] env[65503]: _type = "Task" [ 1141.024424] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.034318] env[65503]: DEBUG oslo_vmware.api [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450795, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.049979] env[65503]: DEBUG nova.compute.manager [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1141.071324] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450793, 'name': PowerOnVM_Task} progress is 90%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.081405] env[65503]: DEBUG nova.virt.hardware [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1141.081680] env[65503]: DEBUG nova.virt.hardware [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1141.081921] env[65503]: DEBUG nova.virt.hardware [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1141.082150] env[65503]: DEBUG nova.virt.hardware [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1141.082294] env[65503]: DEBUG nova.virt.hardware [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1141.082433] env[65503]: DEBUG nova.virt.hardware [None 
req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1141.082720] env[65503]: DEBUG nova.virt.hardware [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1141.082813] env[65503]: DEBUG nova.virt.hardware [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1141.082973] env[65503]: DEBUG nova.virt.hardware [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1141.083143] env[65503]: DEBUG nova.virt.hardware [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1141.083350] env[65503]: DEBUG nova.virt.hardware [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1141.084394] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc92071-57c5-4c87-9382-a73e4745c9d2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.094667] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de1fe82-af40-4292-899d-fb069d8abf0a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.223087] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 7802baf8-04ff-4df4-90b0-71cb97dddc83] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1141.328567] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97615020-c1f1-4222-b385-7fd6a7468417 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.336980] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7efd87d-304c-4a97-8ab3-01c419876619 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.392069] env[65503]: DEBUG nova.compute.manager [req-72913681-e3a3-4237-9ba3-165b879bb0ff 
req-3b0c77fe-c18c-4658-b009-c83e8abb98af service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Received event network-vif-plugged-038e2362-a15e-4f40-bbd0-5289c014118b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1141.392422] env[65503]: DEBUG oslo_concurrency.lockutils [req-72913681-e3a3-4237-9ba3-165b879bb0ff req-3b0c77fe-c18c-4658-b009-c83e8abb98af service nova] Acquiring lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.392422] env[65503]: DEBUG oslo_concurrency.lockutils [req-72913681-e3a3-4237-9ba3-165b879bb0ff req-3b0c77fe-c18c-4658-b009-c83e8abb98af service nova] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.392642] env[65503]: DEBUG oslo_concurrency.lockutils [req-72913681-e3a3-4237-9ba3-165b879bb0ff req-3b0c77fe-c18c-4658-b009-c83e8abb98af service nova] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.392740] env[65503]: DEBUG nova.compute.manager [req-72913681-e3a3-4237-9ba3-165b879bb0ff req-3b0c77fe-c18c-4658-b009-c83e8abb98af service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] No waiting events found dispatching network-vif-plugged-038e2362-a15e-4f40-bbd0-5289c014118b {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1141.392897] env[65503]: WARNING nova.compute.manager [req-72913681-e3a3-4237-9ba3-165b879bb0ff req-3b0c77fe-c18c-4658-b009-c83e8abb98af service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Received unexpected event network-vif-plugged-038e2362-a15e-4f40-bbd0-5289c014118b for instance with vm_state building and task_state spawning. [ 1141.497739] env[65503]: DEBUG nova.network.neutron [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Successfully updated port: 038e2362-a15e-4f40-bbd0-5289c014118b {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1141.536102] env[65503]: DEBUG oslo_vmware.api [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450795, 'name': ReconfigVM_Task, 'duration_secs': 0.16219} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.536425] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870492', 'volume_id': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'name': 'volume-6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bc700871-233c-4aa0-867e-4f166b6f54d1', 'attached_at': '', 'detached_at': '', 'volume_id': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'serial': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1141.571625] env[65503]: DEBUG oslo_vmware.api [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450793, 'name': PowerOnVM_Task, 'duration_secs': 1.080737} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.571892] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1141.572229] env[65503]: INFO nova.compute.manager [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Took 7.97 seconds to spawn the instance on the hypervisor. 
[ 1141.572540] env[65503]: DEBUG nova.compute.manager [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1141.573314] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e45d7d-42fb-4598-aee8-2cb8419fe6a3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.727505] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: f71dca10-3b68-4f1e-868e-a8c8271f7c88] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1142.000936] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.001199] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.001306] env[65503]: DEBUG nova.network.neutron [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1142.089731] env[65503]: INFO nova.compute.manager [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Took 12.83 seconds to build instance. 
[ 1142.230390] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: bcc91c22-5f92-4233-a293-54f2790a750b] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1142.449370] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a01066-4c59-4458-b399-d0ed83230293 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.473082] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ddfd6ef-7ea9-42bb-acce-39cfede665a6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.481112] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance '2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a' progress to 83 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1142.504266] env[65503]: WARNING openstack [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1142.504680] env[65503]: WARNING openstack [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1142.551208] env[65503]: DEBUG nova.network.neutron [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1142.571778] env[65503]: WARNING openstack [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1142.572694] env[65503]: WARNING openstack [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1142.579903] env[65503]: DEBUG nova.objects.instance [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lazy-loading 'flavor' on Instance uuid bc700871-233c-4aa0-867e-4f166b6f54d1 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.591680] env[65503]: DEBUG oslo_concurrency.lockutils [None req-df6b9799-1186-4e01-952f-c4aa348095ac tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "bc0dad37-fc1d-4edc-8680-dba294dd724e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.336s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.670213] env[65503]: WARNING neutronclient.v2_0.client [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1142.670942] env[65503]: WARNING openstack [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1142.671310] env[65503]: WARNING openstack [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1142.734257] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: f50dce3d-4ef2-46e3-9044-c53b55ddbefb] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1142.754761] env[65503]: DEBUG nova.network.neutron [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance_info_cache with network_info: [{"id": "038e2362-a15e-4f40-bbd0-5289c014118b", "address": "fa:16:3e:12:d8:4b", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038e2362-a1", "ovs_interfaceid": "038e2362-a15e-4f40-bbd0-5289c014118b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1142.987225] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1142.987563] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-416d8537-5939-4ad0-8285-6ed7689737a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.996876] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 
tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1142.996876] env[65503]: value = "task-4450796" [ 1142.996876] env[65503]: _type = "Task" [ 1142.996876] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.005391] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450796, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.085409] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0223cb56-b86c-40f0-a873-f618c3b65e64 tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.271s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.236988] env[65503]: INFO nova.compute.manager [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Rescuing [ 1143.237308] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "refresh_cache-bc0dad37-fc1d-4edc-8680-dba294dd724e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.237387] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired lock "refresh_cache-bc0dad37-fc1d-4edc-8680-dba294dd724e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.237608] env[65503]: DEBUG nova.network.neutron [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1143.239041] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: f5319f15-16eb-468a-a70e-7226963ed219] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1143.258100] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.258185] env[65503]: DEBUG nova.compute.manager [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Instance network_info: |[{"id": "038e2362-a15e-4f40-bbd0-5289c014118b", "address": "fa:16:3e:12:d8:4b", "network": {"id": 
"5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038e2362-a1", "ovs_interfaceid": "038e2362-a15e-4f40-bbd0-5289c014118b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1143.259271] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:d8:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '038e2362-a15e-4f40-bbd0-5289c014118b', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1143.268213] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Creating folder: Project (93906c603f7a4b18a34fc4b42fb6d6c1). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1143.269189] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c09c503a-2cfe-4772-bae4-c3227c9cbd2c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.281755] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Created folder: Project (93906c603f7a4b18a34fc4b42fb6d6c1) in parent group-v870190. [ 1143.281978] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Creating folder: Instances. Parent ref: group-v870493. 
{{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1143.282337] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23181e6a-3984-4dfc-9710-b04401a3dd89 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.294890] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Created folder: Instances in parent group-v870493. [ 1143.295182] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1143.295391] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1143.295636] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f2e84b4-0276-48b0-ba9c-677c4ff24320 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.320129] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1143.320129] env[65503]: value = "task-4450799" [ 1143.320129] env[65503]: _type = "Task" [ 1143.320129] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.332020] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450799, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.431876] env[65503]: DEBUG nova.compute.manager [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Received event network-changed-038e2362-a15e-4f40-bbd0-5289c014118b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1143.432238] env[65503]: DEBUG nova.compute.manager [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Refreshing instance network info cache due to event network-changed-038e2362-a15e-4f40-bbd0-5289c014118b. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1143.432586] env[65503]: DEBUG oslo_concurrency.lockutils [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] Acquiring lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.432835] env[65503]: DEBUG oslo_concurrency.lockutils [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] Acquired lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.433121] env[65503]: DEBUG nova.network.neutron [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Refreshing network info cache for port 038e2362-a15e-4f40-bbd0-5289c014118b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1143.507656] env[65503]: DEBUG oslo_vmware.api [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450796, 'name': PowerOnVM_Task, 'duration_secs': 0.436069} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.507898] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1143.508083] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc9e8e8-a773-42c8-a03e-02d6c215d6f8 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance '2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a' progress to 100 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1143.741623] env[65503]: WARNING neutronclient.v2_0.client [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
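[editor's note] The task-4450796 records above show the oslo.vmware pattern this log repeats constantly: submit a vCenter task, log "Waiting for the task ... to complete", poll it ("progress is 0%"), then log "completed successfully" with a duration_secs. The following is a minimal, illustrative sketch of that poll-until-done loop, not the oslo.vmware implementation; get_task_info and its return shape are hypothetical placeholders.

import time

# Hypothetical stand-in for whatever queries the task's current state
# (in this log that role is played by oslo.vmware's _poll_task/wait_for_task).
def get_task_info(task_id):
    raise NotImplementedError("placeholder for a real status query")

def wait_for_task(task_id, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds or fails, mirroring the
    'progress is N%' ... 'completed successfully' sequence in the log."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)      # e.g. {'state': 'running', 'progress': 40}
        if info["state"] == "success":
            return info                    # corresponds to "completed successfully"
        if info["state"] == "error":
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        time.sleep(poll_interval)          # corresponds to the "progress is N%" records
    raise TimeoutError(f"task {task_id} did not complete in {timeout}s")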
[ 1143.742934] env[65503]: WARNING openstack [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1143.742934] env[65503]: WARNING openstack [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1143.750836] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 1d1a96cc-63b3-472c-b94a-1ea00763f770] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1143.814951] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "bc700871-233c-4aa0-867e-4f166b6f54d1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.815236] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.836051] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450799, 'name': CreateVM_Task, 'duration_secs': 0.363669} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.836385] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1143.836873] env[65503]: WARNING neutronclient.v2_0.client [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
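[editor's note] The lockutils records here ("Acquiring lock ... by do_detach_volume", "acquired ... waited 0.000s", and later "released ... held N s") come from oslo.concurrency's named locks, which serialize attach/detach work per instance. A minimal sketch of that usage follows; lockutils.lock is the real context manager, but do_detach_volume and the detach helper are hypothetical simplifications of the compute-manager code, not its actual signature.

from oslo_concurrency import lockutils

def detach(volume_id):
    # Placeholder for the driver call that actually removes the volume.
    print(f"detaching {volume_id}")

def do_detach_volume(instance_uuid, volume_id):
    # The named per-instance lock is what produces the "Acquiring lock ...",
    # "acquired ... waited Ns" and "released ... held Ns" lines in this log.
    with lockutils.lock(instance_uuid):
        detach(volume_id)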
[ 1143.837491] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.837491] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.837653] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1143.837856] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64da277b-d5c7-47f8-b04c-145454fe266d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.843417] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1143.843417] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527b585b-5c3a-ec1f-b963-a880cc59ef3f" [ 1143.843417] env[65503]: _type = "Task" [ 1143.843417] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.852638] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527b585b-5c3a-ec1f-b963-a880cc59ef3f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.854042] env[65503]: WARNING openstack [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1143.854406] env[65503]: WARNING openstack [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1143.903695] env[65503]: WARNING neutronclient.v2_0.client [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1143.904578] env[65503]: WARNING openstack [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1143.905095] env[65503]: WARNING openstack [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1143.936253] env[65503]: WARNING neutronclient.v2_0.client [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
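[editor's note] Several concurrent requests interleave in this section (rescue, volume detach, spawn), each tagged with a req-... ID in the bracketed context. A small sketch for following a single request through the log is below; the regex is simplified to the record shape visible here ("[ uptime] env[pid]: LEVEL logger [context] message") and the file name in the usage comment is hypothetical.

import re

RECORD = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\]\s+env\[(?P<pid>\d+)\]:\s+"
    r"(?P<level>DEBUG|INFO|WARNING|ERROR)\s+(?P<logger>\S+)\s+"
    r"\[(?P<context>[^\]]*)\]\s*(?P<msg>.*?)(?=\s*\[\s*\d+\.\d+\]\s+env\[|$)"
)

def records_for_request(lines, req_id):
    """Yield (timestamp, level, logger, message) for one request ID,
    tolerating multiple records run together on one physical line."""
    for line in lines:
        for m in RECORD.finditer(line):
            if req_id in m.group("context"):
                yield (float(m.group("ts")), m.group("level"),
                       m.group("logger"), m.group("msg"))

# Usage (hypothetical file name):
# with open("n-cpu.log") as f:
#     for ts, level, logger, msg in records_for_request(
#             f, "req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983"):
#         print(f"{ts:10.3f} {level:7} {logger}: {msg[:80]}")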
[ 1143.936978] env[65503]: WARNING openstack [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1143.937342] env[65503]: WARNING openstack [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1143.986449] env[65503]: DEBUG nova.network.neutron [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Updating instance_info_cache with network_info: [{"id": "f222ff54-daca-43ba-8e76-24669d7878e6", "address": "fa:16:3e:b8:3b:e2", "network": {"id": "3686d3c7-ea9f-4310-b1ef-785e7f12fc72", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1730162076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "da471ceaf0f348cd87e42d3c03fdfeb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf222ff54-da", "ovs_interfaceid": "f222ff54-daca-43ba-8e76-24669d7878e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1144.026027] env[65503]: WARNING openstack [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1144.026416] env[65503]: WARNING openstack [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1144.105353] env[65503]: WARNING neutronclient.v2_0.client [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
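[editor's note] The instance_info_cache records above dump the full network_info model as a JSON-style list of VIF dicts (port id, MAC, subnets, fixed IPs, OVS details). The sketch below summarizes such a blob once it has been isolated from the log line; it assumes the extracted text is valid JSON, which matches the logged form (double quotes, true/false/null), and the key paths used are exactly the ones visible above.

import json

def summarize_vifs(network_info_json):
    """Return one compact summary per VIF from a logged network_info blob."""
    vifs = json.loads(network_info_json)
    out = []
    for vif in vifs:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        out.append({
            "port_id": vif["id"],
            "mac": vif["address"],
            "type": vif["type"],            # e.g. "ovs"
            "devname": vif.get("devname"),  # e.g. "tapf222ff54-da"
            "fixed_ips": ips,
        })
    return out

# For the bc0dad37-... cache entry above this yields, e.g.:
# [{'port_id': 'f222ff54-daca-43ba-8e76-24669d7878e6', 'mac': 'fa:16:3e:b8:3b:e2',
#   'type': 'ovs', 'devname': 'tapf222ff54-da', 'fixed_ips': ['192.168.128.10']}]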
[ 1144.106563] env[65503]: WARNING openstack [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1144.107179] env[65503]: WARNING openstack [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1144.199054] env[65503]: DEBUG nova.network.neutron [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updated VIF entry in instance network info cache for port 038e2362-a15e-4f40-bbd0-5289c014118b. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1144.199170] env[65503]: DEBUG nova.network.neutron [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance_info_cache with network_info: [{"id": "038e2362-a15e-4f40-bbd0-5289c014118b", "address": "fa:16:3e:12:d8:4b", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038e2362-a1", "ovs_interfaceid": "038e2362-a15e-4f40-bbd0-5289c014118b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1144.254924] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: bee97942-afb2-465f-9774-56f5aa8becca] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1144.318994] env[65503]: INFO nova.compute.manager [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Detaching volume 6c93ec2a-f1f5-4070-bb08-2e54aed95b58 [ 1144.357066] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': 
session[52ca68d6-9baf-b27a-a70d-300f2615599e]527b585b-5c3a-ec1f-b963-a880cc59ef3f, 'name': SearchDatastore_Task, 'duration_secs': 0.010858} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.358321] env[65503]: INFO nova.virt.block_device [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Attempting to driver detach volume 6c93ec2a-f1f5-4070-bb08-2e54aed95b58 from mountpoint /dev/sdb [ 1144.358591] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Volume detach. Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1144.358834] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870492', 'volume_id': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'name': 'volume-6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bc700871-233c-4aa0-867e-4f166b6f54d1', 'attached_at': '', 'detached_at': '', 'volume_id': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'serial': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1144.359253] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.359534] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1144.359736] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.359891] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.360096] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1144.361036] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cff2dac-8447-4772-8ec5-aca495a915d9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.364160] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1a6080a-08b1-4900-aef8-1f41d937b3f8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.390436] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003558a1-b662-4b08-bb37-373bcc30e15e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.393250] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1144.393535] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1144.394182] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b0335b9-db62-4c23-b4ae-c99c5d0fc6ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.401501] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1144.401501] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206ec07-bc52-5a88-a20a-7b569ce0a473" [ 1144.401501] env[65503]: _type = "Task" [ 1144.401501] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.407891] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c44e1c-cd9a-429c-a174-9ae073588ad8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.438412] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206ec07-bc52-5a88-a20a-7b569ce0a473, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.439565] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fb208e-2ab1-452c-a3e9-65bfa738d36a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.455613] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] The volume has not been displaced from its original location: [datastore2] volume-6c93ec2a-f1f5-4070-bb08-2e54aed95b58/volume-6c93ec2a-f1f5-4070-bb08-2e54aed95b58.vmdk. No consolidation needed. {{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1144.460859] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1144.461200] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f21b36ad-0e01-49d1-9236-305d5edad184 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.480101] env[65503]: DEBUG oslo_vmware.api [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1144.480101] env[65503]: value = "task-4450800" [ 1144.480101] env[65503]: _type = "Task" [ 1144.480101] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.489138] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Releasing lock "refresh_cache-bc0dad37-fc1d-4edc-8680-dba294dd724e" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.491162] env[65503]: DEBUG oslo_vmware.api [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450800, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.701728] env[65503]: DEBUG oslo_concurrency.lockutils [req-b50c5bb2-6f80-4780-8f21-9e6e1679438f req-0b4bb434-d84e-4918-9cac-a27799631c89 service nova] Releasing lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.758499] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: f89ca00e-d54e-4040-bf18-9a5ec96378d5] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1144.916461] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5206ec07-bc52-5a88-a20a-7b569ce0a473, 'name': SearchDatastore_Task, 'duration_secs': 0.021358} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.917458] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e36ee523-e986-4e51-b9d7-305198c2ca7a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.924419] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1144.924419] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52059518-a698-0789-0bdf-037758c16e73" [ 1144.924419] env[65503]: _type = "Task" [ 1144.924419] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.935299] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52059518-a698-0789-0bdf-037758c16e73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.989996] env[65503]: DEBUG oslo_vmware.api [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450800, 'name': ReconfigVM_Task, 'duration_secs': 0.498987} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.990318] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1144.994865] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28c77e56-53fd-43a4-9cf2-e5a0bc95d3ef {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.014590] env[65503]: DEBUG oslo_vmware.api [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1145.014590] env[65503]: value = "task-4450801" [ 1145.014590] env[65503]: _type = "Task" [ 1145.014590] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.025318] env[65503]: DEBUG oslo_vmware.api [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450801, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.262027] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 96d8f433-9b86-422f-88ef-99836fb21f30] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1145.435412] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52059518-a698-0789-0bdf-037758c16e73, 'name': SearchDatastore_Task, 'duration_secs': 0.04959} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.435725] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.435983] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7/0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1145.436271] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5072d809-62b5-40d1-b9fe-905036e3f7e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.444287] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1145.444287] env[65503]: value = "task-4450802" [ 1145.444287] env[65503]: _type = "Task" [ 1145.444287] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.452494] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450802, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.525449] env[65503]: DEBUG oslo_vmware.api [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450801, 'name': ReconfigVM_Task, 'duration_secs': 0.239388} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.525812] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870492', 'volume_id': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'name': 'volume-6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bc700871-233c-4aa0-867e-4f166b6f54d1', 'attached_at': '', 'detached_at': '', 'volume_id': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58', 'serial': '6c93ec2a-f1f5-4070-bb08-2e54aed95b58'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1145.555748] env[65503]: WARNING neutronclient.v2_0.client [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1145.626608] env[65503]: WARNING neutronclient.v2_0.client [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1145.627044] env[65503]: WARNING neutronclient.v2_0.client [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
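[editor's note] By this point the section has logged completions for PowerOnVM_Task, CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task and two ReconfigVM_Tasks, each with a duration_secs field. A small tally of those completion records, useful for spotting slow vCenter operations, is sketched below; the regex is keyed to the "'name': <Task>, 'duration_secs': <n>} completed successfully" text seen in this log.

import re
from collections import defaultdict

COMPLETED = re.compile(
    r"'name':\s*(?P<name>\w+),\s*'duration_secs':\s*(?P<secs>[\d.]+)\}\s*completed successfully"
)

def task_durations(lines):
    """Total time and count of completed vCenter tasks, grouped by task name."""
    totals = defaultdict(lambda: [0.0, 0])
    for line in lines:
        for m in COMPLETED.finditer(line):
            entry = totals[m.group("name")]
            entry[0] += float(m.group("secs"))
            entry[1] += 1
    return {name: {"total_s": round(total, 3), "count": count}
            for name, (total, count) in totals.items()}

# Against the excerpts above this reports entries such as
# CopyVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task and SearchDatastore_Task.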
[ 1145.667608] env[65503]: DEBUG nova.network.neutron [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Port f8cdf0ee-e818-41cd-b8aa-b485ea737879 binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 1145.667984] env[65503]: DEBUG oslo_concurrency.lockutils [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.668166] env[65503]: DEBUG oslo_concurrency.lockutils [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.668346] env[65503]: DEBUG nova.network.neutron [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1145.765754] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: fcdcabb9-f076-4fa9-ac30-3220eb6064da] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1145.954390] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450802, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476197} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.954657] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7/0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1145.954885] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1145.955164] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b0e024cb-3fa7-4b26-bd4d-6e391fec4aed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.962177] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1145.962177] env[65503]: value = "task-4450803" [ 1145.962177] env[65503]: _type = "Task" [ 1145.962177] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.973946] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450803, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.033043] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1146.033420] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4f94e1c-0f4b-450f-a2fe-b179b075bc17 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.040757] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1146.040757] env[65503]: value = "task-4450804" [ 1146.040757] env[65503]: _type = "Task" [ 1146.040757] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.050862] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450804, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.077600] env[65503]: DEBUG nova.objects.instance [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lazy-loading 'flavor' on Instance uuid bc700871-233c-4aa0-867e-4f166b6f54d1 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.171707] env[65503]: WARNING neutronclient.v2_0.client [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1146.172642] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1146.173115] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1146.269516] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 9e1b174f-c7cb-45f1-b7c0-b980f32823c8] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1146.316575] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1146.316993] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1146.390732] env[65503]: WARNING neutronclient.v2_0.client [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
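[editor's note] A large share of this section is repeated WARNING noise: the neutronclient deprecation notice and the openstacksdk "no such option valid_interfaces in group [cinder]/[barbican]" messages recur on nearly every request. The sketch below counts duplicate warnings so they can be read once instead of per occurrence; the regex is tuned to the record layout of this log and truncates messages for grouping.

import re
from collections import Counter

WARNING = re.compile(
    r"WARNING\s+(?P<logger>\S+)\s+\[[^\]]*\]\s*"
    r"(?P<msg>.*?)(?=\s*\[\s*\d+\.\d+\]\s+env\[|$)"
)

def warning_noise(lines, top=5):
    """Return the most frequent (logger, message) WARNING pairs."""
    counts = Counter()
    for line in lines:
        for m in WARNING.finditer(line):
            counts[(m.group("logger"), m.group("msg")[:120])] += 1
    return counts.most_common(top)

# Expected leaders for this section: neutronclient.v2_0.client deprecation
# and the two openstack "Disabling service ..." NoSuchOptError warnings.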
[ 1146.391467] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1146.391797] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1146.472795] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450803, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06712} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.472926] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1146.473896] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9b3b04-1597-4bc9-a9bb-6bc820424366 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.500181] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7/0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1146.502639] env[65503]: DEBUG nova.network.neutron [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance_info_cache with network_info: [{"id": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "address": "fa:16:3e:57:76:3d", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cdf0ee-e8", "ovs_interfaceid": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1146.503895] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60926afb-3916-4538-a666-7049eb54279d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.526590] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1146.526590] env[65503]: value = "task-4450805" [ 1146.526590] env[65503]: _type = "Task" [ 1146.526590] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.539750] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450805, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.553753] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450804, 'name': PowerOffVM_Task, 'duration_secs': 0.224608} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.553954] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1146.554815] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bde8be7-22b5-4137-919e-58dd6a97ce3e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.576851] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbbf5264-73bb-4d48-b9f6-715314114eed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.618977] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1146.618977] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e221877-000b-4d9a-a1a5-ddf5d06837b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.626259] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1146.626259] env[65503]: value = "task-4450806" [ 1146.626259] env[65503]: _type = "Task" [ 1146.626259] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.639040] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1146.639040] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1146.639545] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.639545] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.639545] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1146.639892] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-546a5ad0-d8ec-4294-80d0-535f6c370164 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.650813] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1146.651039] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1146.651821] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b462126-2f4d-4ca2-802d-fafcae5dcbbd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.658294] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1146.658294] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5282c796-e1b6-047f-ef80-703fa743326b" [ 1146.658294] env[65503]: _type = "Task" [ 1146.658294] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.669400] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5282c796-e1b6-047f-ef80-703fa743326b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.773541] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 310ee0b5-07ee-4cf0-b262-5e8b473efa3d] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1147.020177] env[65503]: DEBUG oslo_concurrency.lockutils [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1147.039951] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450805, 'name': ReconfigVM_Task, 'duration_secs': 0.317967} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.040204] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7/0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1147.040811] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bed6c43-70dc-47ba-86cb-80fab25644dd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.047631] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.047858] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.049853] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1147.049853] env[65503]: value = "task-4450807" [ 1147.049853] env[65503]: _type = "Task" [ 1147.049853] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.059907] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450807, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.084932] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5866618e-ba69-4dc0-99fe-f44a46486a5a tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.270s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.170552] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5282c796-e1b6-047f-ef80-703fa743326b, 'name': SearchDatastore_Task, 'duration_secs': 0.014566} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.171455] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe826339-4437-4a5d-817a-becd5ff2e8c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.177995] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1147.177995] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d44bd5-bea7-90ac-e55c-fe60370f841e" [ 1147.177995] env[65503]: _type = "Task" [ 1147.177995] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.186707] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d44bd5-bea7-90ac-e55c-fe60370f841e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.277244] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: a043a8e2-8661-4d80-939d-8e7b02b0459f] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1147.524791] env[65503]: DEBUG nova.compute.manager [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=65503) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:925}} [ 1147.550643] env[65503]: INFO nova.compute.manager [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Detaching volume 1dc74fb4-8c72-4626-9e0a-9dad8090a4ba [ 1147.566209] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450807, 'name': Rename_Task, 'duration_secs': 0.141897} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.566430] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.566727] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9dfe7823-3132-4f08-ae29-797df363cda4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.574131] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1147.574131] env[65503]: value = "task-4450808" [ 1147.574131] env[65503]: _type = "Task" [ 1147.574131] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.582527] env[65503]: INFO nova.virt.block_device [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Attempting to driver detach volume 1dc74fb4-8c72-4626-9e0a-9dad8090a4ba from mountpoint /dev/sdb [ 1147.582797] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Volume detach. Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1147.583015] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870468', 'volume_id': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'name': 'volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'b00a98a4-4865-4a02-a353-3d1da9ef0e51', 'attached_at': '', 'detached_at': '', 'volume_id': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'serial': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1147.583895] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c96574-058f-4a1e-8079-3747b3d6c86f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.590570] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450808, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.611120] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183f20a9-b7bf-41cb-9ff2-433c26767d5e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.621972] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46933f3-79e9-462e-b83e-142a9369009d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.648352] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9a8633-3987-41ed-8620-b255ae925881 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.665389] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The volume has not been displaced from its original location: [datastore2] volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba/volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba.vmdk. No consolidation needed. {{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1147.670938] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Reconfiguring VM instance instance-0000005a to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1147.671346] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c97a9712-1d51-4562-87f8-9e2cca79cffd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.698687] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d44bd5-bea7-90ac-e55c-fe60370f841e, 'name': SearchDatastore_Task, 'duration_secs': 0.051266} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.700081] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1147.700361] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] bc0dad37-fc1d-4edc-8680-dba294dd724e/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. 
{{(pid=65503) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1147.700689] env[65503]: DEBUG oslo_vmware.api [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1147.700689] env[65503]: value = "task-4450809" [ 1147.700689] env[65503]: _type = "Task" [ 1147.700689] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.700905] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6948347-ea24-4503-9f76-ab5ecf35bc84 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.713212] env[65503]: DEBUG oslo_vmware.api [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450809, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.714662] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1147.714662] env[65503]: value = "task-4450810" [ 1147.714662] env[65503]: _type = "Task" [ 1147.714662] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.724040] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450810, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.780762] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 82415443-1884-4898-996e-828d23f67f23] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1148.087590] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450808, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.118419] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "bc700871-233c-4aa0-867e-4f166b6f54d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.118747] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.118973] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "bc700871-233c-4aa0-867e-4f166b6f54d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.119174] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.119340] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.121760] env[65503]: INFO nova.compute.manager [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Terminating instance [ 1148.218193] env[65503]: DEBUG oslo_vmware.api [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450809, 'name': ReconfigVM_Task, 'duration_secs': 0.300312} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.222092] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Reconfigured VM instance instance-0000005a to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1148.227949] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19106ea9-0a5d-4e5c-91d7-15068f62d2d0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.245953] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450810, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.247477] env[65503]: DEBUG oslo_vmware.api [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1148.247477] env[65503]: value = "task-4450811" [ 1148.247477] env[65503]: _type = "Task" [ 1148.247477] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.257118] env[65503]: DEBUG oslo_vmware.api [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450811, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.284497] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: b135ecea-cd3b-4ae2-8c69-2a3c2fe69e14] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1148.588048] env[65503]: DEBUG oslo_vmware.api [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450808, 'name': PowerOnVM_Task, 'duration_secs': 0.638645} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.588048] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1148.588048] env[65503]: INFO nova.compute.manager [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Took 7.54 seconds to spawn the instance on the hypervisor. 
[ 1148.588048] env[65503]: DEBUG nova.compute.manager [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1148.588780] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317fb57a-2c36-4de3-8961-3f5274f33955 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.626971] env[65503]: DEBUG nova.compute.manager [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1148.627468] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1148.628980] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a91a807-bc1d-4018-bcfd-848135abf63f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.633869] env[65503]: DEBUG oslo_concurrency.lockutils [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.634139] env[65503]: DEBUG oslo_concurrency.lockutils [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.639351] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1148.639351] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-406c1e66-d88c-44d5-8938-a16041775438 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.647638] env[65503]: DEBUG oslo_vmware.api [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1148.647638] env[65503]: value = "task-4450812" [ 1148.647638] env[65503]: _type = "Task" [ 1148.647638] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.661407] env[65503]: DEBUG oslo_vmware.api [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.729640] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450810, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6074} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.729856] env[65503]: INFO nova.virt.vmwareapi.ds_util [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] bc0dad37-fc1d-4edc-8680-dba294dd724e/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. [ 1148.730685] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3d9491-9099-4c65-88e8-c21b4f5797c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.758106] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] bc0dad37-fc1d-4edc-8680-dba294dd724e/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1148.761433] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbe65b19-d3ed-45bd-9f8c-3e33f36e10a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.779779] env[65503]: DEBUG oslo_vmware.api [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450811, 'name': ReconfigVM_Task, 'duration_secs': 0.167323} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.781142] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870468', 'volume_id': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'name': 'volume-1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'b00a98a4-4865-4a02-a353-3d1da9ef0e51', 'attached_at': '', 'detached_at': '', 'volume_id': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba', 'serial': '1dc74fb4-8c72-4626-9e0a-9dad8090a4ba'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1148.783545] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1148.783545] env[65503]: value = "task-4450813" [ 1148.783545] env[65503]: _type = "Task" [ 1148.783545] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.787868] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 8962b1b8-4875-4a1a-b231-36385755a976] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1149.106563] env[65503]: INFO nova.compute.manager [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Took 13.20 seconds to build instance. [ 1149.139876] env[65503]: DEBUG nova.objects.instance [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'migration_context' on Instance uuid 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1149.158126] env[65503]: DEBUG oslo_vmware.api [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450812, 'name': PowerOffVM_Task, 'duration_secs': 0.382913} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.158378] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1149.158536] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1149.158778] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-827da04b-58d0-410f-9be8-eef84f5c4d6f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.251556] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1149.251812] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1149.252421] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleting the datastore file [datastore2] bc700871-233c-4aa0-867e-4f166b6f54d1 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1149.252421] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d64e7d99-aa86-4488-8cfb-9c79e74cd4c6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.260493] env[65503]: DEBUG oslo_vmware.api [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for the task: (returnval){ [ 1149.260493] env[65503]: value = "task-4450815" [ 1149.260493] env[65503]: _type = "Task" [ 1149.260493] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.271758] env[65503]: DEBUG oslo_vmware.api [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450815, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.290833] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 81e27e9b-7d7e-4b04-8257-268660fd9ec3] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1149.296800] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450813, 'name': ReconfigVM_Task, 'duration_secs': 0.324098} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.297720] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Reconfigured VM instance instance-0000006b to attach disk [datastore2] bc0dad37-fc1d-4edc-8680-dba294dd724e/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1149.298236] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a109a4dd-1cfc-4f5a-8461-b1476b264732 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.325620] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbfefdf1-b3a5-4976-aee5-2d23312cc7e0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.337126] env[65503]: DEBUG nova.objects.instance [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lazy-loading 'flavor' on Instance uuid b00a98a4-4865-4a02-a353-3d1da9ef0e51 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1149.344646] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1149.344646] env[65503]: value = "task-4450816" [ 1149.344646] env[65503]: _type = "Task" [ 1149.344646] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.354870] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450816, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.510888] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.511796] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.608900] env[65503]: DEBUG oslo_concurrency.lockutils [None req-98f6e637-01f6-4214-84fa-5473a6732294 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.711s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.772665] env[65503]: DEBUG oslo_vmware.api [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Task: {'id': task-4450815, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262922} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.772927] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1149.773118] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1149.773287] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1149.773453] env[65503]: INFO nova.compute.manager [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1149.773756] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1149.776465] env[65503]: DEBUG nova.compute.manager [-] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1149.776578] env[65503]: DEBUG nova.network.neutron [-] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1149.776798] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1149.777322] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1149.777568] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1149.798563] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 3b31611f-84f9-4bf6-8e26-f64db06d15ed] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1149.834154] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d512ddd-db08-4def-849e-a4f197661750 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.842709] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1149.852066] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420e0fd3-2435-4605-ba77-6a13ee6f223f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.861979] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450816, 'name': ReconfigVM_Task, 'duration_secs': 0.171355} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.888137] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1149.889016] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2803eec3-d0bd-49c9-b13f-8498976dab45 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.891342] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76f57aa-8c6a-4f9f-91ab-f0232c78e0da {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.902056] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025a22a6-b919-488b-8ebc-71d312cdd454 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.906328] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1149.906328] env[65503]: value = "task-4450817" [ 1149.906328] env[65503]: _type = "Task" [ 1149.906328] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.918604] env[65503]: DEBUG nova.compute.provider_tree [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1149.926164] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450817, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.015454] env[65503]: INFO nova.compute.manager [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Detaching volume 1c9d3e4e-c23c-460d-827c-3a19ff329147 [ 1150.071251] env[65503]: INFO nova.virt.block_device [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Attempting to driver detach volume 1c9d3e4e-c23c-460d-827c-3a19ff329147 from mountpoint /dev/sdb [ 1150.071251] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Volume detach. 
Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1150.071451] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870473', 'volume_id': '1c9d3e4e-c23c-460d-827c-3a19ff329147', 'name': 'volume-1c9d3e4e-c23c-460d-827c-3a19ff329147', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b7f55645-f152-4fc9-a962-393f9a9b9c55', 'attached_at': '', 'detached_at': '', 'volume_id': '1c9d3e4e-c23c-460d-827c-3a19ff329147', 'serial': '1c9d3e4e-c23c-460d-827c-3a19ff329147'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1150.072341] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ff7763-8c4c-4309-bffb-91ab2dcdecd0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.096532] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42cfa843-9c28-4c27-a218-f43c397b2554 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.106433] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63a6499-23f9-4cdb-849f-acd5d9d90f70 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.128556] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89d9194-ee59-483a-a668-b484828032e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.146415] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] The volume has not been displaced from its original location: [datastore1] volume-1c9d3e4e-c23c-460d-827c-3a19ff329147/volume-1c9d3e4e-c23c-460d-827c-3a19ff329147.vmdk. No consolidation needed. 
{{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1150.152145] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1150.152510] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3eb9294-0db1-4760-a03d-03d87e3072e7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.171999] env[65503]: DEBUG oslo_vmware.api [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1150.171999] env[65503]: value = "task-4450818" [ 1150.171999] env[65503]: _type = "Task" [ 1150.171999] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.181564] env[65503]: DEBUG oslo_vmware.api [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450818, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.301942] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: f666b0a3-3679-456b-bc59-38107c299f80] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1150.356799] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6771b8b8-7099-4310-b488-0c735856a1e7 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.309s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.413742] env[65503]: DEBUG nova.compute.manager [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Received event network-changed-038e2362-a15e-4f40-bbd0-5289c014118b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1150.414137] env[65503]: DEBUG nova.compute.manager [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Refreshing instance network info cache due to event network-changed-038e2362-a15e-4f40-bbd0-5289c014118b. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1150.414357] env[65503]: DEBUG oslo_concurrency.lockutils [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] Acquiring lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.414571] env[65503]: DEBUG oslo_concurrency.lockutils [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] Acquired lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.414730] env[65503]: DEBUG nova.network.neutron [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Refreshing network info cache for port 038e2362-a15e-4f40-bbd0-5289c014118b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1150.424632] env[65503]: DEBUG nova.scheduler.client.report [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.428260] env[65503]: DEBUG oslo_vmware.api [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450817, 'name': PowerOnVM_Task, 'duration_secs': 0.413381} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.429216] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1150.432853] env[65503]: DEBUG nova.compute.manager [None req-3ac9ba9a-cc9a-4d59-9673-dc9c4087b983 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1150.434066] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c2357b-db82-44a0-b15b-46f2e3be5526 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.682460] env[65503]: DEBUG oslo_vmware.api [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450818, 'name': ReconfigVM_Task, 'duration_secs': 0.261989} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.682633] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1150.687494] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f6a59db-3edf-4015-adc6-63ce629fde14 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.705721] env[65503]: DEBUG oslo_vmware.api [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1150.705721] env[65503]: value = "task-4450819" [ 1150.705721] env[65503]: _type = "Task" [ 1150.705721] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.717224] env[65503]: DEBUG oslo_vmware.api [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450819, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.786466] env[65503]: DEBUG nova.compute.manager [req-96e09169-d0af-44ed-8d2b-7477bfb766d9 req-a7cf13cd-2917-4961-be85-8a94ebc9b0fc service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Received event network-vif-deleted-61496b79-7af0-4518-be7a-0b0c270e3eff {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1150.786723] env[65503]: INFO nova.compute.manager [req-96e09169-d0af-44ed-8d2b-7477bfb766d9 req-a7cf13cd-2917-4961-be85-8a94ebc9b0fc service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Neutron deleted interface 61496b79-7af0-4518-be7a-0b0c270e3eff; detaching it from the instance and deleting it from the info cache [ 1150.786917] env[65503]: DEBUG nova.network.neutron [req-96e09169-d0af-44ed-8d2b-7477bfb766d9 req-a7cf13cd-2917-4961-be85-8a94ebc9b0fc service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1150.805861] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 2a1587cd-8b47-439f-948c-d58a5dc8220e] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1150.920059] env[65503]: WARNING neutronclient.v2_0.client [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
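The ReconfigVM_Task entries above (tasks task-4450818 and task-4450819, removing disk 2001 from instance-00000062) follow the usual oslo.vmware invoke-and-wait pattern: build a VirtualMachineConfigSpec carrying a 'remove' device change, submit it as a vCenter task, then poll the task until it completes. Below is a minimal sketch of that pattern only, not the driver's actual code; the vCenter endpoint, credentials, vm_ref and device objects are hypothetical placeholders and a reachable vCenter is assumed.

    from oslo_vmware import api

    # Hypothetical endpoint and credentials. task_poll_interval controls how
    # often the session polls a running task, which is what produces the
    # periodic "progress is N%" lines seen in the log.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    def detach_disk(session, vm_ref, device):
        """Submit ReconfigVM_Task with a 'remove' device change and wait for it."""
        factory = session.vim.client.factory
        spec = factory.create('ns0:VirtualMachineConfigSpec')
        change = factory.create('ns0:VirtualDeviceConfigSpec')
        change.operation = 'remove'
        change.device = device            # the VirtualDisk to detach (hypothetical object)
        spec.deviceChange = [change]
        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
        session.wait_for_task(task)       # blocks, polling the task as _poll_task does above

The 0.5s poll interval here is arbitrary; it only affects how frequently progress lines like the ones above are emitted.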
[ 1150.921424] env[65503]: WARNING openstack [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1150.923018] env[65503]: WARNING openstack [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1151.092453] env[65503]: WARNING openstack [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1151.092844] env[65503]: WARNING openstack [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1151.158199] env[65503]: WARNING neutronclient.v2_0.client [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1151.159042] env[65503]: WARNING openstack [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1151.159242] env[65503]: WARNING openstack [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1151.218612] env[65503]: DEBUG oslo_vmware.api [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450819, 'name': ReconfigVM_Task, 'duration_secs': 0.160684} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.219128] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870473', 'volume_id': '1c9d3e4e-c23c-460d-827c-3a19ff329147', 'name': 'volume-1c9d3e4e-c23c-460d-827c-3a19ff329147', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b7f55645-f152-4fc9-a962-393f9a9b9c55', 'attached_at': '', 'detached_at': '', 'volume_id': '1c9d3e4e-c23c-460d-827c-3a19ff329147', 'serial': '1c9d3e4e-c23c-460d-827c-3a19ff329147'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1151.223345] env[65503]: DEBUG nova.network.neutron [-] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1151.255482] env[65503]: DEBUG nova.network.neutron [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updated VIF entry in instance network info cache for port 038e2362-a15e-4f40-bbd0-5289c014118b. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1151.255973] env[65503]: DEBUG nova.network.neutron [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance_info_cache with network_info: [{"id": "038e2362-a15e-4f40-bbd0-5289c014118b", "address": "fa:16:3e:12:d8:4b", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038e2362-a1", "ovs_interfaceid": "038e2362-a15e-4f40-bbd0-5289c014118b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1151.291058] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3cc25b4-a5c2-4e85-b264-e1e1bbbedc78 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.302914] env[65503]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a1d072-5248-4326-8958-19e06268ff50 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.314746] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 0ece7ad4-40cd-43e4-8cbc-dddd43f0645d] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1151.345542] env[65503]: DEBUG nova.compute.manager [req-96e09169-d0af-44ed-8d2b-7477bfb766d9 req-a7cf13cd-2917-4961-be85-8a94ebc9b0fc service nova] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Detach interface failed, port_id=61496b79-7af0-4518-be7a-0b0c270e3eff, reason: Instance bc700871-233c-4aa0-867e-4f166b6f54d1 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1151.402269] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.403255] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.403255] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.403255] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.403460] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.405857] env[65503]: INFO nova.compute.manager [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Terminating 
instance [ 1151.443384] env[65503]: DEBUG oslo_concurrency.lockutils [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.809s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.725555] env[65503]: INFO nova.compute.manager [-] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Took 1.95 seconds to deallocate network for instance. [ 1151.759242] env[65503]: DEBUG oslo_concurrency.lockutils [req-4a8af4d0-3e90-4b32-90e1-9158c8a05ec2 req-2a1a8582-92af-4e57-8d3f-361a48b2ad55 service nova] Releasing lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.784736] env[65503]: DEBUG nova.objects.instance [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lazy-loading 'flavor' on Instance uuid b7f55645-f152-4fc9-a962-393f9a9b9c55 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.818110] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 909b3535-9410-4820-a34d-6c0e9627f506] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1151.867092] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "6f9a7c04-78dd-46b2-a42d-6ff218478f19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.867349] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "6f9a7c04-78dd-46b2-a42d-6ff218478f19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.912793] env[65503]: DEBUG nova.compute.manager [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1151.913031] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1151.914250] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d7b60b-e754-4c2d-be7b-8a6c589b6771 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.922668] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1151.922976] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d4d3e17-82e9-40b1-9756-b2b2a5a217c6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.929774] env[65503]: DEBUG oslo_vmware.api [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1151.929774] env[65503]: value = "task-4450820" [ 1151.929774] env[65503]: _type = "Task" [ 1151.929774] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.938552] env[65503]: DEBUG oslo_vmware.api [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450820, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.232805] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.233111] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.233334] env[65503]: DEBUG nova.objects.instance [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lazy-loading 'resources' on Instance uuid bc700871-233c-4aa0-867e-4f166b6f54d1 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1152.322101] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 236e651f-6a27-4601-8a76-ca1619e32dc6] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1152.369626] env[65503]: DEBUG nova.compute.manager [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1152.440875] env[65503]: DEBUG oslo_vmware.api [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450820, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.792262] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fe044d0c-0db0-4b53-8eda-bda080b5ae2c tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.281s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.825140] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: d7dd714a-a738-4d68-bbf0-32daf4a1c49b] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1152.890730] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.914753] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534df19c-5ea7-499f-9631-44b2bc6cf3dd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.923057] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88019e9-2b9e-4d7e-ace3-3ee90f72ca25 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.958040] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24feb527-f5c8-4715-8b1f-40a3fc1c6cc4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.969804] env[65503]: DEBUG oslo_vmware.api [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450820, 'name': PowerOffVM_Task, 'duration_secs': 0.533671} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.972374] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1152.972555] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1152.972980] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b265e75c-922c-44a7-9fc0-f092e49847aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.975581] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72fb80e-1ac9-46cb-8b04-dc0990445537 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.980819] env[65503]: INFO nova.compute.manager [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Swapping old allocation on dict_keys(['988ff85a-1d12-41bb-a369-e298e8491ca1']) held by migration a4e8ae1c-ef52-421f-b917-f181609c1865 for instance [ 1152.995819] env[65503]: DEBUG nova.compute.provider_tree [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.011111] env[65503]: DEBUG nova.scheduler.client.report [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Overwriting current allocation {'allocations': {'988ff85a-1d12-41bb-a369-e298e8491ca1': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 148}}, 'project_id': '3658921b747e4d78a2046b838cb36d26', 'user_id': '0d16022d9f8b43ba8e97191fdc2b1a8d', 'consumer_generation': 1} on consumer 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a {{(pid=65503) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1153.064221] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1153.064999] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
1153.064999] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleting the datastore file [datastore1] b00a98a4-4865-4a02-a353-3d1da9ef0e51 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1153.064999] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-140b94dd-218b-4315-9eb2-20d623e63f0f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.071419] env[65503]: WARNING neutronclient.v2_0.client [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1153.076853] env[65503]: DEBUG oslo_vmware.api [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1153.076853] env[65503]: value = "task-4450822" [ 1153.076853] env[65503]: _type = "Task" [ 1153.076853] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.086021] env[65503]: DEBUG oslo_vmware.api [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450822, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.105435] env[65503]: DEBUG oslo_concurrency.lockutils [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.105631] env[65503]: DEBUG oslo_concurrency.lockutils [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.105826] env[65503]: DEBUG nova.network.neutron [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1153.328367] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: bc0c0066-b672-4385-8d68-c14e3635af4e] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1153.501974] env[65503]: DEBUG nova.scheduler.client.report [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based 
on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1153.588915] env[65503]: DEBUG oslo_vmware.api [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450822, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.342075} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.589466] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1153.589666] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1153.589842] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1153.590020] env[65503]: INFO nova.compute.manager [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1153.590268] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1153.590463] env[65503]: DEBUG nova.compute.manager [-] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1153.590555] env[65503]: DEBUG nova.network.neutron [-] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1153.592377] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
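The inventory payloads logged for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 describe, per resource class, total, reserved, min_unit/max_unit, step_size and allocation_ratio. Placement treats (total - reserved) * allocation_ratio as the usable capacity of a class, while max_unit caps what any single allocation may request. The following self-contained sketch (a hypothetical helper, not Nova or Placement code) applies that arithmetic to the exact figures from the log:

    # Inventory as reported above for provider 988ff85a-1d12-41bb-a369-e298e8491ca1.
    INVENTORY = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'max_unit': 96,    'allocation_ratio': 1.0},
    }

    def capacity(inv):
        """Usable capacity per resource class: (total - reserved) * allocation_ratio."""
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    if __name__ == '__main__':
        for rc, cap in capacity(INVENTORY).items():
            # max_unit still limits a single allocation, e.g. one server can claim
            # at most 16 VCPU or 96 DISK_GB on this provider.
            print(rc, cap, 'max per allocation:', INVENTORY[rc]['max_unit'])

With these numbers the provider exposes 192 VCPU (48 * 4.0 overcommit), 196078 MB of RAM and 200 GB of disk.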
[ 1153.592377] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1153.592377] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1153.609397] env[65503]: WARNING neutronclient.v2_0.client [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1153.609708] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1153.610030] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1153.630531] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1153.735765] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1153.735765] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1153.795414] env[65503]: WARNING neutronclient.v2_0.client [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
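The repeated openstacksdk warnings above ("no such option valid_interfaces in group [cinder]/[barbican]") are oslo.config NoSuchOptError exceptions: the option is not registered on the config object the SDK inspects, so it disables those service entries instead of failing. A minimal, self-contained illustration of that behaviour follows; the option name matches the log, but the registration shown is only an example, not Nova's own configuration setup.

    from oslo_config import cfg

    CONF = cfg.ConfigOpts()
    CONF.register_group(cfg.OptGroup('cinder'))

    # Accessing an unregistered option reproduces the warning's root cause.
    try:
        CONF.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        print('unregistered:', exc)   # no such option valid_interfaces in group [cinder]

    # Once the option is registered in the group, the lookup succeeds.
    CONF.register_opts(
        [cfg.ListOpt('valid_interfaces', default=['internal', 'public'])],
        group='cinder')
    print(CONF.cinder.valid_interfaces)   # ['internal', 'public']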
[ 1153.796164] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1153.796509] env[65503]: WARNING openstack [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1153.832720] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: f0e4c7e9-f420-4f7f-9479-27b9f6aec8ee] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1153.841806] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.842249] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.842327] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "b7f55645-f152-4fc9-a962-393f9a9b9c55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.842453] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.842738] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.845509] env[65503]: INFO nova.compute.manager [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Terminating instance [ 1153.886756] env[65503]: DEBUG nova.network.neutron [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance_info_cache with network_info: [{"id": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "address": "fa:16:3e:57:76:3d", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cdf0ee-e8", "ovs_interfaceid": "f8cdf0ee-e818-41cd-b8aa-b485ea737879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1154.008727] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.775s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.011289] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.121s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.014628] env[65503]: INFO nova.compute.claims [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1154.030695] env[65503]: INFO nova.scheduler.client.report [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Deleted allocations for instance bc700871-233c-4aa0-867e-4f166b6f54d1 [ 1154.078292] env[65503]: DEBUG nova.compute.manager 
[req-53ab0b36-f01a-4f4f-b1af-5059d7d09067 req-86851b49-aa26-46d5-94b4-d68b06a99838 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Received event network-vif-deleted-96370c0c-da2e-4229-82a1-a24b799e6402 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1154.078489] env[65503]: INFO nova.compute.manager [req-53ab0b36-f01a-4f4f-b1af-5059d7d09067 req-86851b49-aa26-46d5-94b4-d68b06a99838 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Neutron deleted interface 96370c0c-da2e-4229-82a1-a24b799e6402; detaching it from the instance and deleting it from the info cache [ 1154.078653] env[65503]: DEBUG nova.network.neutron [req-53ab0b36-f01a-4f4f-b1af-5059d7d09067 req-86851b49-aa26-46d5-94b4-d68b06a99838 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1154.335846] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 2effe3e4-ea22-4d9f-8f5c-38ee661611e3] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1154.348700] env[65503]: DEBUG nova.compute.manager [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1154.348933] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1154.350195] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e310903d-d7fa-4d5f-8b10-746063648800 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.359402] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1154.360955] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-929a9aa5-24ed-47ec-8f9f-0ce9227abba4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.369071] env[65503]: DEBUG oslo_vmware.api [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1154.369071] env[65503]: value = "task-4450823" [ 1154.369071] env[65503]: _type = "Task" [ 1154.369071] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.379401] env[65503]: DEBUG oslo_vmware.api [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450823, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.389487] env[65503]: DEBUG oslo_concurrency.lockutils [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.390736] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ebe965-b5d0-4548-9382-1afe8166e4fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.399807] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048ddaa7-ed86-44a7-bf84-da78995b8591 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.539939] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a0be86be-dc65-4f55-9899-20a1df89744c tempest-AttachVolumeNegativeTest-462648042 tempest-AttachVolumeNegativeTest-462648042-project-member] Lock "bc700871-233c-4aa0-867e-4f166b6f54d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.420s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.556567] env[65503]: DEBUG nova.network.neutron [-] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1154.583387] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f70fc05-a3ef-4c26-a902-0114ffe33adb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.595197] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e82afa0-31ce-4f3b-aff1-36af0369c9bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.630274] env[65503]: DEBUG nova.compute.manager [req-53ab0b36-f01a-4f4f-b1af-5059d7d09067 req-86851b49-aa26-46d5-94b4-d68b06a99838 service nova] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Detach interface failed, port_id=96370c0c-da2e-4229-82a1-a24b799e6402, reason: Instance b00a98a4-4865-4a02-a353-3d1da9ef0e51 could not be found. 
{{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1154.839760] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: dcc876ae-075f-48d2-81a4-a1b780d6fdec] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1154.882303] env[65503]: DEBUG oslo_vmware.api [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450823, 'name': PowerOffVM_Task, 'duration_secs': 0.199525} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.882573] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1154.882735] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1154.882999] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a235a66f-2a5b-483d-9c71-37ec165122fc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.953878] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1154.954020] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1154.954218] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Deleting the datastore file [datastore1] b7f55645-f152-4fc9-a962-393f9a9b9c55 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1154.955158] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6d56ea9-5ce7-41e2-a60f-0b0afa8e5544 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.963713] env[65503]: DEBUG oslo_vmware.api [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1154.963713] env[65503]: value = "task-4450825" [ 1154.963713] env[65503]: _type = 
"Task" [ 1154.963713] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.975844] env[65503]: DEBUG oslo_vmware.api [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450825, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.059323] env[65503]: INFO nova.compute.manager [-] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Took 1.47 seconds to deallocate network for instance. [ 1155.212481] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c0d437-5818-4d48-adc7-1fd87c3018d0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.221623] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a630eff3-d648-4cd1-a2b9-1c85bbf66b1c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.254958] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4354692f-e89a-4aa1-8319-1c358bc6dcde {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.264137] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0e03e8-d312-4ef6-bbe9-4345c97d3f4a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.280992] env[65503]: DEBUG nova.compute.provider_tree [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.345558] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 708ed8ab-0ec9-457c-966d-b11c55895981] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1155.476375] env[65503]: DEBUG oslo_vmware.api [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450825, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220333} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.476688] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1155.476909] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1155.477106] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1155.477293] env[65503]: INFO nova.compute.manager [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1155.477602] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1155.477881] env[65503]: DEBUG nova.compute.manager [-] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1155.477976] env[65503]: DEBUG nova.network.neutron [-] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1155.478361] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
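The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entries above come from oslo.service's looping-call machinery retrying network deallocation until it succeeds. Below is a minimal sketch of that retry pattern using FixedIntervalLoopingCall; the manager may use a different looping-call variant, and do_deallocate is a hypothetical stand-in for the Neutron call.

    from oslo_service import loopingcall

    def do_deallocate():
        """Hypothetical stand-in for the Neutron deallocation call."""
        pass

    def _deallocate_with_retries():
        try:
            do_deallocate()
        except Exception:
            # Swallow the error; the looping call tries again after `interval`.
            return
        # Raising LoopingCallDone stops the loop and unblocks .wait().
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=1.0, initial_delay=0).wait()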
[ 1155.479016] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1155.479281] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1155.498137] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1155.498592] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8919a1d2-e8b0-4251-b47b-11bfd4c40970 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.508585] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1155.508585] env[65503]: value = "task-4450827" [ 1155.508585] env[65503]: _type = "Task" [ 1155.508585] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.522745] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450827, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.537704] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1155.569030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.784240] env[65503]: DEBUG nova.scheduler.client.report [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1155.849234] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 0001f4db-3073-411c-8d60-6d8528ef263a] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1156.023505] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450827, 'name': PowerOffVM_Task, 'duration_secs': 0.310699} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.024502] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1156.025278] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1156.025502] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1156.025912] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 
tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1156.026072] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1156.026232] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1156.026373] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1156.026577] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1156.026735] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1156.026901] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1156.027076] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1156.027244] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1156.033612] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-173cc5db-89a9-4d0d-a799-7a9c871ca8d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.045017] env[65503]: DEBUG nova.compute.manager [req-1c0716f8-f06c-40d8-ba86-33a62c0fd01a req-6759812f-b42b-4cf2-afa1-dd1f9d5f7510 service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Received event network-vif-deleted-7467737c-bad8-423a-85d9-f5870d27aebc {{(pid=65503) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11819}} [ 1156.045217] env[65503]: INFO nova.compute.manager [req-1c0716f8-f06c-40d8-ba86-33a62c0fd01a req-6759812f-b42b-4cf2-afa1-dd1f9d5f7510 service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Neutron deleted interface 7467737c-bad8-423a-85d9-f5870d27aebc; detaching it from the instance and deleting it from the info cache [ 1156.045382] env[65503]: DEBUG nova.network.neutron [req-1c0716f8-f06c-40d8-ba86-33a62c0fd01a req-6759812f-b42b-4cf2-afa1-dd1f9d5f7510 service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1156.054068] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1156.054068] env[65503]: value = "task-4450828" [ 1156.054068] env[65503]: _type = "Task" [ 1156.054068] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.065454] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450828, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.291091] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.279s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.291455] env[65503]: DEBUG nova.compute.manager [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1156.294948] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.726s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.295063] env[65503]: DEBUG nova.objects.instance [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lazy-loading 'resources' on Instance uuid b00a98a4-4865-4a02-a353-3d1da9ef0e51 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1156.356100] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 429b7542-c288-4a7a-9032-09881938b256] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1156.517281] env[65503]: DEBUG nova.network.neutron [-] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1156.548468] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c72610b8-9ab3-470d-8d5e-8bbbd75b999f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.562783] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a768c69-5e17-4946-9c96-ace50e74f955 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.578586] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450828, 'name': ReconfigVM_Task, 'duration_secs': 0.203935} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.580045] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6101dc5b-2bce-4870-9e4a-aabb830ea55b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.604431] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1156.604728] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1156.604888] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1156.605101] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1156.605254] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1156.605415] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1156.605626] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1156.605838] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1156.606046] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1156.606254] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1156.606462] env[65503]: DEBUG nova.virt.hardware [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1156.618155] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-903ba0a4-221c-43ad-b42b-e9d3741f0418 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.621369] env[65503]: DEBUG nova.compute.manager [req-1c0716f8-f06c-40d8-ba86-33a62c0fd01a req-6759812f-b42b-4cf2-afa1-dd1f9d5f7510 service nova] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Detach interface failed, port_id=7467737c-bad8-423a-85d9-f5870d27aebc, reason: Instance b7f55645-f152-4fc9-a962-393f9a9b9c55 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1156.626498] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1156.626498] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b82535-000e-5f06-0399-affab107d038" [ 1156.626498] env[65503]: _type = "Task" [ 1156.626498] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.635730] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b82535-000e-5f06-0399-affab107d038, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.798478] env[65503]: DEBUG nova.compute.utils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1156.804207] env[65503]: DEBUG nova.compute.manager [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1156.804439] env[65503]: DEBUG nova.network.neutron [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1156.804740] env[65503]: WARNING neutronclient.v2_0.client [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1156.805050] env[65503]: WARNING neutronclient.v2_0.client [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1156.805725] env[65503]: WARNING openstack [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1156.806021] env[65503]: WARNING openstack [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1156.859201] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 52701da5-2908-40f8-b1c5-bc30f17d51a0] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1156.867630] env[65503]: DEBUG nova.policy [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07ffde40fb9f4aee8cbe3e13b3f6fd6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da471ceaf0f348cd87e42d3c03fdfeb8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1156.973053] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6807fe0-835e-4a85-91f6-be1d6cae6ea5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.982414] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb92580-5da9-4360-9713-176239fa38a3 {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.013186] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b48722-7807-4ad9-aec6-f1fb36ef6fc8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.021138] env[65503]: INFO nova.compute.manager [-] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Took 1.54 seconds to deallocate network for instance. [ 1157.024031] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff92ed3-460b-4696-b35e-376999fbd8fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.043227] env[65503]: DEBUG nova.compute.provider_tree [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.138237] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b82535-000e-5f06-0399-affab107d038, 'name': SearchDatastore_Task, 'duration_secs': 0.010658} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.143653] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1157.144012] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-513a477b-7706-4077-bbf4-2d902bb50d56 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.163467] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1157.163467] env[65503]: value = "task-4450829" [ 1157.163467] env[65503]: _type = "Task" [ 1157.163467] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.164489] env[65503]: DEBUG nova.network.neutron [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Successfully created port: e8d6afe7-1d1d-402a-a7f1-86cd2d14c559 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1157.175724] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450829, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.305353] env[65503]: DEBUG nova.compute.manager [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1157.362516] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 31ee1061-6199-4341-86ab-9ae606b269fe] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1157.534419] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.546156] env[65503]: DEBUG nova.scheduler.client.report [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.677799] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450829, 'name': ReconfigVM_Task, 'duration_secs': 0.228073} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.678173] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1157.679041] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8daf2123-690a-4458-9cef-1d0410415e2f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.705424] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a/2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1157.705777] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-755514cb-b1e4-45f6-b198-a2f9fccf4880 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.724688] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1157.724688] env[65503]: value = "task-4450830" [ 1157.724688] env[65503]: _type = "Task" [ 1157.724688] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.737471] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450830, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.867591] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: e4c1c94b-744f-4bed-8e68-3b3f9de7db44] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1158.051513] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.757s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.054246] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.520s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.054566] env[65503]: DEBUG nova.objects.instance [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lazy-loading 'resources' on Instance uuid b7f55645-f152-4fc9-a962-393f9a9b9c55 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.075511] env[65503]: INFO nova.scheduler.client.report [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleted allocations for instance b00a98a4-4865-4a02-a353-3d1da9ef0e51 [ 1158.235496] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450830, 'name': ReconfigVM_Task, 'duration_secs': 0.320757} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.235814] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a/2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1158.236734] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e91e58-fa62-4c6a-9e0c-849b421ed4c6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.259153] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275d3b12-cd4c-4853-ba29-5c1e3360b566 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.297652] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275f0808-1a98-4778-96ed-e8fc41b4cfdf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.321476] env[65503]: DEBUG nova.compute.manager [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1158.324488] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37c85ff-6f56-421e-8412-498144728bf1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.332827] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1158.333139] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70660042-a9f7-4163-a5c3-dafb75c396c4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.341989] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1158.341989] env[65503]: value = "task-4450831" [ 1158.341989] env[65503]: _type = "Task" [ 1158.341989] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.350277] env[65503]: DEBUG nova.virt.hardware [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1158.350469] env[65503]: DEBUG nova.virt.hardware [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1158.350622] env[65503]: DEBUG nova.virt.hardware [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1158.350800] env[65503]: DEBUG nova.virt.hardware [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1158.350940] env[65503]: DEBUG nova.virt.hardware [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1158.351098] env[65503]: DEBUG nova.virt.hardware [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1158.351306] env[65503]: DEBUG nova.virt.hardware [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1158.351457] env[65503]: DEBUG nova.virt.hardware [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1158.351619] env[65503]: DEBUG nova.virt.hardware [None 
req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1158.351987] env[65503]: DEBUG nova.virt.hardware [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1158.352037] env[65503]: DEBUG nova.virt.hardware [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1158.352847] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d0269b-4134-4748-a195-ecf1280f13f7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.358832] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450831, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.365927] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd635d4-bd09-4c35-b703-5749140c8de6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.370799] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 972a50ed-759a-4312-9314-9bf01a03fc3a] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1158.582894] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2ffbda9b-2095-4eba-80ec-57aff48f9584 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "b00a98a4-4865-4a02-a353-3d1da9ef0e51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.180s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.618157] env[65503]: DEBUG nova.compute.manager [req-6f829583-fd11-480b-99e1-39efa58a940a req-967e7735-1150-460d-98c1-dbc1d2517c72 service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Received event network-vif-plugged-e8d6afe7-1d1d-402a-a7f1-86cd2d14c559 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1158.618157] env[65503]: DEBUG oslo_concurrency.lockutils [req-6f829583-fd11-480b-99e1-39efa58a940a req-967e7735-1150-460d-98c1-dbc1d2517c72 service nova] Acquiring lock "6f9a7c04-78dd-46b2-a42d-6ff218478f19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.618774] env[65503]: DEBUG oslo_concurrency.lockutils [req-6f829583-fd11-480b-99e1-39efa58a940a req-967e7735-1150-460d-98c1-dbc1d2517c72 service nova] Lock 
"6f9a7c04-78dd-46b2-a42d-6ff218478f19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.618774] env[65503]: DEBUG oslo_concurrency.lockutils [req-6f829583-fd11-480b-99e1-39efa58a940a req-967e7735-1150-460d-98c1-dbc1d2517c72 service nova] Lock "6f9a7c04-78dd-46b2-a42d-6ff218478f19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.618943] env[65503]: DEBUG nova.compute.manager [req-6f829583-fd11-480b-99e1-39efa58a940a req-967e7735-1150-460d-98c1-dbc1d2517c72 service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] No waiting events found dispatching network-vif-plugged-e8d6afe7-1d1d-402a-a7f1-86cd2d14c559 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1158.619172] env[65503]: WARNING nova.compute.manager [req-6f829583-fd11-480b-99e1-39efa58a940a req-967e7735-1150-460d-98c1-dbc1d2517c72 service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Received unexpected event network-vif-plugged-e8d6afe7-1d1d-402a-a7f1-86cd2d14c559 for instance with vm_state building and task_state spawning. [ 1158.742447] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a570c4-7b6d-44bf-9397-2088f016047b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.751588] env[65503]: DEBUG nova.network.neutron [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Successfully updated port: e8d6afe7-1d1d-402a-a7f1-86cd2d14c559 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1158.755187] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c086a5a9-266a-45a2-9f2a-7c4df65b68f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.786012] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665b9159-1821-4f26-8a09-0b33e1b9a88d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.795329] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad59b04-18a6-4ac4-87d5-0245c9893e54 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.811547] env[65503]: DEBUG nova.compute.provider_tree [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1158.854732] env[65503]: DEBUG oslo_vmware.api [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450831, 'name': PowerOnVM_Task, 'duration_secs': 0.443451} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.855102] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1158.875067] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: bcd845e2-5a89-4eef-bb76-33d69834bbc1] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1159.259049] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.259049] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.259216] env[65503]: DEBUG nova.network.neutron [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1159.315188] env[65503]: DEBUG nova.scheduler.client.report [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1159.377937] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: e52bfd02-d1f6-4e32-a3c2-b5feebfc6eaf] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1159.764373] env[65503]: WARNING openstack [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1159.764373] env[65503]: WARNING openstack [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 
tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1159.809624] env[65503]: DEBUG nova.network.neutron [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1159.822183] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.768s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.847838] env[65503]: INFO nova.scheduler.client.report [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Deleted allocations for instance b7f55645-f152-4fc9-a962-393f9a9b9c55 [ 1159.850801] env[65503]: WARNING openstack [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1159.851332] env[65503]: WARNING openstack [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1159.881732] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: bdbae548-eefc-4e59-8053-f4b8e232580d] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1159.905451] env[65503]: WARNING neutronclient.v2_0.client [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1159.906410] env[65503]: WARNING openstack [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1159.906641] env[65503]: WARNING openstack [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1159.934551] env[65503]: INFO nova.compute.manager [None req-44b514f2-4485-4619-90e7-3507d7781211 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance to original state: 'active' [ 1160.020911] env[65503]: DEBUG nova.network.neutron [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Updating instance_info_cache with network_info: [{"id": "e8d6afe7-1d1d-402a-a7f1-86cd2d14c559", "address": "fa:16:3e:72:c7:8d", "network": {"id": "3686d3c7-ea9f-4310-b1ef-785e7f12fc72", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1730162076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "da471ceaf0f348cd87e42d3c03fdfeb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d6afe7-1d", "ovs_interfaceid": "e8d6afe7-1d1d-402a-a7f1-86cd2d14c559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1160.366225] env[65503]: DEBUG oslo_concurrency.lockutils [None req-25223803-4545-4a02-a20a-d49244522073 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "b7f55645-f152-4fc9-a962-393f9a9b9c55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.524s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.385312] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: e74fe378-737a-4732-9a2d-b889a436b8a3] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1160.523621] env[65503]: 
DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Releasing lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.524024] env[65503]: DEBUG nova.compute.manager [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Instance network_info: |[{"id": "e8d6afe7-1d1d-402a-a7f1-86cd2d14c559", "address": "fa:16:3e:72:c7:8d", "network": {"id": "3686d3c7-ea9f-4310-b1ef-785e7f12fc72", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1730162076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "da471ceaf0f348cd87e42d3c03fdfeb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d6afe7-1d", "ovs_interfaceid": "e8d6afe7-1d1d-402a-a7f1-86cd2d14c559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1160.524493] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:c7:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11da2092-76f7-447e-babb-8fc14ad39a71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8d6afe7-1d1d-402a-a7f1-86cd2d14c559', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1160.532270] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1160.532894] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1160.533225] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23458724-a3ea-4eed-a056-4a98436707ee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.557284] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1160.557284] env[65503]: value = "task-4450834" [ 1160.557284] env[65503]: _type = "Task" [ 1160.557284] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.567740] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450834, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.653245] env[65503]: DEBUG nova.compute.manager [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Received event network-changed-e8d6afe7-1d1d-402a-a7f1-86cd2d14c559 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1160.653559] env[65503]: DEBUG nova.compute.manager [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Refreshing instance network info cache due to event network-changed-e8d6afe7-1d1d-402a-a7f1-86cd2d14c559. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1160.653834] env[65503]: DEBUG oslo_concurrency.lockutils [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] Acquiring lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.654052] env[65503]: DEBUG oslo_concurrency.lockutils [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] Acquired lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.654291] env[65503]: DEBUG nova.network.neutron [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Refreshing network info cache for port e8d6afe7-1d1d-402a-a7f1-86cd2d14c559 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1160.890474] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: ad8676f9-0433-49bf-bc72-e36fa010ff1d] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1160.915441] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "fb2dddac-4ac0-498a-b972-e61255833ad0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.915674] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.068546] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450834, 'name': CreateVM_Task, 'duration_secs': 0.333375} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.068809] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1161.069374] env[65503]: WARNING neutronclient.v2_0.client [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1161.069829] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.069965] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.070341] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1161.070644] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc4dbe7e-7782-4fed-914f-1ac4e4a2feed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.076511] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1161.076511] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e20441-c8bf-ff62-561b-22503e440fc1" [ 1161.076511] env[65503]: _type = "Task" [ 1161.076511] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.085353] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e20441-c8bf-ff62-561b-22503e440fc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.157778] env[65503]: WARNING neutronclient.v2_0.client [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1161.158430] env[65503]: WARNING openstack [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1161.159013] env[65503]: WARNING openstack [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1161.396832] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 083797a8-8daf-493b-89de-7ae9137ed538] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1161.418804] env[65503]: DEBUG nova.compute.manager [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1161.588493] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e20441-c8bf-ff62-561b-22503e440fc1, 'name': SearchDatastore_Task, 'duration_secs': 0.014275} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.589052] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.589506] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1161.589861] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.590129] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.590413] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1161.590782] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cc91adb-77e3-41c7-b211-96d723363a6b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.609019] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1161.609019] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1161.609019] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45510e3e-0d20-47f7-9ce9-d63b96b87cbe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.615447] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1161.615447] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ce6a0f-e967-b617-46f8-c86570309af2" [ 1161.615447] env[65503]: _type = "Task" [ 1161.615447] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.629373] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ce6a0f-e967-b617-46f8-c86570309af2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.709889] env[65503]: WARNING openstack [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1161.710310] env[65503]: WARNING openstack [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1161.766029] env[65503]: WARNING neutronclient.v2_0.client [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1161.766762] env[65503]: WARNING openstack [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1161.767161] env[65503]: WARNING openstack [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1161.842471] env[65503]: DEBUG oslo_concurrency.lockutils [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.842735] env[65503]: DEBUG oslo_concurrency.lockutils [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.843075] env[65503]: DEBUG oslo_concurrency.lockutils [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.843176] env[65503]: DEBUG oslo_concurrency.lockutils [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.843299] env[65503]: DEBUG oslo_concurrency.lockutils [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.845614] env[65503]: INFO nova.compute.manager [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Terminating instance [ 1161.856190] env[65503]: DEBUG nova.network.neutron [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 
req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Updated VIF entry in instance network info cache for port e8d6afe7-1d1d-402a-a7f1-86cd2d14c559. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1161.856504] env[65503]: DEBUG nova.network.neutron [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Updating instance_info_cache with network_info: [{"id": "e8d6afe7-1d1d-402a-a7f1-86cd2d14c559", "address": "fa:16:3e:72:c7:8d", "network": {"id": "3686d3c7-ea9f-4310-b1ef-785e7f12fc72", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1730162076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "da471ceaf0f348cd87e42d3c03fdfeb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d6afe7-1d", "ovs_interfaceid": "e8d6afe7-1d1d-402a-a7f1-86cd2d14c559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1161.898777] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: a57486e1-82e3-48d5-99fe-c89b300a2136] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1161.944523] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.944850] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.947585] env[65503]: INFO nova.compute.claims [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1162.126923] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ce6a0f-e967-b617-46f8-c86570309af2, 'name': 
SearchDatastore_Task, 'duration_secs': 0.035558} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.127736] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3f570f4-41ca-4f04-92f9-31109548dbad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.133303] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1162.133303] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52424dbd-f860-f4d4-bbe1-3699b9fcd39e" [ 1162.133303] env[65503]: _type = "Task" [ 1162.133303] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.140680] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52424dbd-f860-f4d4-bbe1-3699b9fcd39e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.349759] env[65503]: DEBUG nova.compute.manager [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1162.350039] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1162.350343] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2767574-f1f6-48d5-b037-e3855d3754cf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.359556] env[65503]: DEBUG oslo_concurrency.lockutils [req-059a7f07-3ec2-425c-9e9a-829f69f0a189 req-e94e40d8-e286-433e-ba67-8cfe16e7fc8e service nova] Releasing lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.359993] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1162.359993] env[65503]: value = "task-4450836" [ 1162.359993] env[65503]: _type = "Task" [ 1162.359993] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.372098] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450836, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.402209] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 62a18449-7cec-4785-a340-d0450adc8044] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1162.644240] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52424dbd-f860-f4d4-bbe1-3699b9fcd39e, 'name': SearchDatastore_Task, 'duration_secs': 0.04235} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.644492] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.644798] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 6f9a7c04-78dd-46b2-a42d-6ff218478f19/6f9a7c04-78dd-46b2-a42d-6ff218478f19.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1162.645094] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9654daeb-7e32-4ee6-9c20-6f76252bb061 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.652744] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1162.652744] env[65503]: value = "task-4450837" [ 1162.652744] env[65503]: _type = "Task" [ 1162.652744] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.661706] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450837, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.871938] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450836, 'name': PowerOffVM_Task, 'duration_secs': 0.223215} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.872297] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1162.872515] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Volume detach. Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1162.872713] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870485', 'volume_id': '67d27af0-2341-4a81-8a74-90b6123a2a7d', 'name': 'volume-67d27af0-2341-4a81-8a74-90b6123a2a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a', 'attached_at': '2025-11-14T15:57:04.000000', 'detached_at': '', 'volume_id': '67d27af0-2341-4a81-8a74-90b6123a2a7d', 'serial': '67d27af0-2341-4a81-8a74-90b6123a2a7d'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1162.873844] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709418f8-5b00-4e7d-84df-4c00005f0677 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.898206] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a7b5ad-336c-4504-9475-96e47c9ed2b0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.906638] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: f840b178-fd54-4c84-808c-a14c99a5ecdd] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1162.909286] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f12f30-96b1-4444-934a-275da4d28948 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.936926] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49818a0-02a6-4d18-981d-6d7cb1e0e711 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.962137] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The volume has not been displaced from its original location: [datastore2] volume-67d27af0-2341-4a81-8a74-90b6123a2a7d/volume-67d27af0-2341-4a81-8a74-90b6123a2a7d.vmdk. No consolidation needed. 
{{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1162.968825] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfiguring VM instance instance-00000064 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1162.970557] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec584b5c-b134-4b83-9a22-077c532e56be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.994076] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1162.994076] env[65503]: value = "task-4450838" [ 1162.994076] env[65503]: _type = "Task" [ 1162.994076] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.009356] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450838, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.134257] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846d8b8a-ab59-4415-aea5-606df5443af7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.141655] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b84f70-219e-49ba-a75a-4b2699c1954e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.174776] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69470eb-f8b6-4c33-a2e5-82e79898aed9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.183328] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450837, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479973} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.185083] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 6f9a7c04-78dd-46b2-a42d-6ff218478f19/6f9a7c04-78dd-46b2-a42d-6ff218478f19.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1163.185309] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1163.185594] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba930dea-2a34-4e3d-8833-6046b2d8e8be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.188303] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f576a68-096b-4b67-8b18-03ddff2a540a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.201915] env[65503]: DEBUG nova.compute.provider_tree [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1163.204548] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1163.204548] env[65503]: value = "task-4450839" [ 1163.204548] env[65503]: _type = "Task" [ 1163.204548] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.214271] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450839, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.413572] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: a22f589e-7c40-4023-9a4c-9ab2a76faa94] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1163.505194] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450838, 'name': ReconfigVM_Task, 'duration_secs': 0.314353} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.505974] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Reconfigured VM instance instance-00000064 to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1163.513219] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-547b4a09-47b3-4d81-a6c3-51bc3ccdfc16 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.539073] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1163.539073] env[65503]: value = "task-4450840" [ 1163.539073] env[65503]: _type = "Task" [ 1163.539073] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.553040] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450840, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.706973] env[65503]: DEBUG nova.scheduler.client.report [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.722121] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450839, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071989} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.723095] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1163.723934] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0d96e0-a4ae-4207-b088-f6c3d66f8521 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.747695] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 6f9a7c04-78dd-46b2-a42d-6ff218478f19/6f9a7c04-78dd-46b2-a42d-6ff218478f19.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.748516] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1efa2ec2-9c05-4e8e-8ebf-8af8067f84f6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.769992] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1163.769992] env[65503]: value = "task-4450841" [ 1163.769992] env[65503]: _type = "Task" [ 1163.769992] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.779106] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450841, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.917986] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: ff256d3f-af88-4f01-bdfd-cf89e06ab364] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1164.050555] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450840, 'name': ReconfigVM_Task, 'duration_secs': 0.151138} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.051033] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870485', 'volume_id': '67d27af0-2341-4a81-8a74-90b6123a2a7d', 'name': 'volume-67d27af0-2341-4a81-8a74-90b6123a2a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a', 'attached_at': '2025-11-14T15:57:04.000000', 'detached_at': '', 'volume_id': '67d27af0-2341-4a81-8a74-90b6123a2a7d', 'serial': '67d27af0-2341-4a81-8a74-90b6123a2a7d'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1164.051436] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1164.052577] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb251c4a-3eac-4ffa-81e5-93cb3830bd08 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.061386] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1164.061386] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03d8abc5-94b5-493e-8e14-a12d58c0d797 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.123464] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.123711] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.133125] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1164.133340] env[65503]: DEBUG 
nova.virt.vmwareapi.vmops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1164.133496] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleting the datastore file [datastore1] 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1164.134427] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5dc016e-437f-47c1-a2d9-62f2f16e4c54 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.142640] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1164.142640] env[65503]: value = "task-4450843" [ 1164.142640] env[65503]: _type = "Task" [ 1164.142640] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.151079] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.216033] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.271s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.216606] env[65503]: DEBUG nova.compute.manager [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1164.281217] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450841, 'name': ReconfigVM_Task, 'duration_secs': 0.331375} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.281540] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 6f9a7c04-78dd-46b2-a42d-6ff218478f19/6f9a7c04-78dd-46b2-a42d-6ff218478f19.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1164.282197] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a68cfe5-ace7-4019-b98e-e6897a4b1c7d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.290466] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1164.290466] env[65503]: value = "task-4450844" [ 1164.290466] env[65503]: _type = "Task" [ 1164.290466] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.301159] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450844, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.421948] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: b6cda94b-2894-4cf0-8522-6593df9723bd] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1164.628219] env[65503]: DEBUG nova.compute.utils [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1164.660955] env[65503]: DEBUG oslo_vmware.api [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161017} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.661405] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1164.661722] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1164.662026] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1164.662341] env[65503]: INFO nova.compute.manager [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Took 2.31 seconds to destroy the instance on the hypervisor. [ 1164.662745] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1164.663071] env[65503]: DEBUG nova.compute.manager [-] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1164.663253] env[65503]: DEBUG nova.network.neutron [-] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1164.663777] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1164.664580] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1164.665013] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1164.708337] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1164.721881] env[65503]: DEBUG nova.compute.utils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1164.723923] env[65503]: DEBUG nova.compute.manager [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1164.723923] env[65503]: DEBUG nova.network.neutron [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1164.724098] env[65503]: WARNING neutronclient.v2_0.client [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1164.724419] env[65503]: WARNING neutronclient.v2_0.client [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1164.724984] env[65503]: WARNING openstack [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1164.725333] env[65503]: WARNING openstack [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1164.772141] env[65503]: DEBUG nova.policy [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f06102af25d4279a0104a75b62014fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '592efb180976432cbcecb9ad421e1bd1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1164.803307] env[65503]: DEBUG oslo_vmware.api [None 
req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450844, 'name': Rename_Task, 'duration_secs': 0.407039} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.805868] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1164.806529] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d4eda5b-1aeb-401a-8d4e-f5a7fc1ae0a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.814471] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1164.814471] env[65503]: value = "task-4450845" [ 1164.814471] env[65503]: _type = "Task" [ 1164.814471] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.824203] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450845, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.925982] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 9809fc8b-3842-4ce3-bb63-8ea37ee3bf51] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1165.077910] env[65503]: DEBUG nova.network.neutron [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Successfully created port: c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1165.132027] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.233176] env[65503]: DEBUG nova.compute.manager [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1165.280846] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.281946] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.282366] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.282629] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.282957] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.286562] env[65503]: INFO nova.compute.manager [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Terminating instance [ 1165.327194] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450845, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.432615] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 45a4b511-aa6a-433d-b136-f53686db9575] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1165.650159] env[65503]: DEBUG nova.network.neutron [-] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1165.727548] env[65503]: DEBUG nova.compute.manager [req-2c345ea6-d146-45ca-83a4-b85b9a2640c3 req-4943ae43-7f35-4de4-9bfe-325c86f7204b service nova] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Received event network-vif-deleted-f8cdf0ee-e818-41cd-b8aa-b485ea737879 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1165.791899] env[65503]: DEBUG nova.compute.manager [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1165.792147] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1165.793359] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e627ce-ba8a-475d-b257-03ae72a39740 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.803378] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1165.803641] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0392e757-9222-4d5a-b505-52d73a58cafb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.811041] env[65503]: DEBUG oslo_vmware.api [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1165.811041] env[65503]: value = "task-4450846" [ 1165.811041] env[65503]: _type = "Task" [ 1165.811041] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.822868] env[65503]: DEBUG oslo_vmware.api [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450846, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.829721] env[65503]: DEBUG oslo_vmware.api [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450845, 'name': PowerOnVM_Task, 'duration_secs': 0.543767} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.830116] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1165.830348] env[65503]: INFO nova.compute.manager [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Took 7.51 seconds to spawn the instance on the hypervisor. [ 1165.830517] env[65503]: DEBUG nova.compute.manager [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1165.831645] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17552383-e207-45c8-8b9f-4116b2035642 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.936648] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 606b8e9f-67c0-4d5c-85ab-ca35f8b31977] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1166.152757] env[65503]: INFO nova.compute.manager [-] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Took 1.49 seconds to deallocate network for instance. 
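[annotation] The "Acquiring lock … acquired … released" records through this stretch (the per-instance terminate_instance lock above, the attach_volume and refresh_cache locks below) are produced by oslo.concurrency's lockutils helpers. A minimal sketch of both the decorator and context-manager forms, with illustrative lock names rather than Nova's real ones:

    # Illustrative use of oslo.concurrency locking, the mechanism behind the
    # "Acquiring lock ... acquired ... released" lines in this log.
    from oslo_concurrency import lockutils

    # Decorator form: all callers sharing this lock name are serialized.
    @lockutils.synchronized('example-instance-uuid', 'demo-')
    def do_terminate_instance():
        # critical section; runs with the named lock held
        print('terminating')

    # Context-manager form, comparable to the refresh_cache-<uuid> locks.
    with lockutils.lock('refresh_cache-example-instance-uuid'):
        print('network info cache refreshed')

    do_terminate_instance()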
[ 1166.208193] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.208511] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.209222] env[65503]: INFO nova.compute.manager [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Attaching volume 94222922-f7c9-48bc-96b8-a28f0f9ddbbe to /dev/sdb [ 1166.245336] env[65503]: DEBUG nova.compute.manager [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1166.250140] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24539ed-6d40-4c7e-9497-f1ec43db2365 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.259884] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2597387d-6aad-460b-af13-0d56725fe3b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.278802] env[65503]: DEBUG nova.virt.block_device [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Updating existing volume attachment record: 052832a9-22d0-4993-bb20-128925f99cb4 {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1166.284528] env[65503]: DEBUG nova.virt.hardware [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow 
threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1166.284898] env[65503]: DEBUG nova.virt.hardware [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1166.285144] env[65503]: DEBUG nova.virt.hardware [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1166.285429] env[65503]: DEBUG nova.virt.hardware [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1166.285621] env[65503]: DEBUG nova.virt.hardware [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1166.285878] env[65503]: DEBUG nova.virt.hardware [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1166.286374] env[65503]: DEBUG nova.virt.hardware [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1166.286633] env[65503]: DEBUG nova.virt.hardware [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1166.286889] env[65503]: DEBUG nova.virt.hardware [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1166.287145] env[65503]: DEBUG nova.virt.hardware [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1166.287392] env[65503]: DEBUG nova.virt.hardware [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1166.288681] env[65503]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb78f35-fb08-4090-90fe-f8a1f7874967 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.299603] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de177d0-0000-418c-a412-a78ed2ce10b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.331933] env[65503]: DEBUG oslo_vmware.api [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450846, 'name': PowerOffVM_Task, 'duration_secs': 0.465891} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.331933] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1166.332067] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1166.332571] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6f83ef3-719d-4af6-9fc6-7ac5a6f86eac {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.350539] env[65503]: INFO nova.compute.manager [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Took 13.48 seconds to build instance. 
[ 1166.442774] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 38e9a714-87f8-422c-9cc5-09b6aec76198] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1166.448139] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1166.448420] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1166.448643] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Deleting the datastore file [datastore1] ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1166.448958] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-038fe65c-93a2-46aa-a0fc-b6e9511241b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.457088] env[65503]: DEBUG oslo_vmware.api [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1166.457088] env[65503]: value = "task-4450849" [ 1166.457088] env[65503]: _type = "Task" [ 1166.457088] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.466600] env[65503]: DEBUG oslo_vmware.api [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450849, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.708021] env[65503]: INFO nova.compute.manager [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Took 0.55 seconds to detach 1 volumes for instance. 
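[annotation] The "Using /dev/sd instead of None" records above (get_next_device_name in nova/compute/utils.py) show Nova falling back to the /dev/sd prefix and then taking the next free letter, which is how the volume attach above lands on /dev/sdb. A simplified, hypothetical illustration of that naming rule, not Nova's actual implementation:

    # Simplified illustration of "pick the next free /dev/sdX" device naming.
    # Hypothetical sketch, not nova.compute.utils.get_next_device_name itself.
    import string


    def next_device_name(used, prefix='/dev/sd'):
        """Return the first prefix+letter name not present in `used`."""
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError('no free device names left')


    # The instance already has its root disk on /dev/sda, so the attached
    # volume gets /dev/sdb, matching the log above.
    print(next_device_name({'/dev/sda'}))   # -> /dev/sdb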
[ 1166.723029] env[65503]: DEBUG nova.network.neutron [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Successfully updated port: c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1166.797154] env[65503]: DEBUG nova.compute.manager [req-07b9dc53-11ff-49c9-97ee-d04bd0968484 req-eff98b5f-ec51-4d4f-8801-bd08e78e9835 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Received event network-vif-plugged-c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1166.797388] env[65503]: DEBUG oslo_concurrency.lockutils [req-07b9dc53-11ff-49c9-97ee-d04bd0968484 req-eff98b5f-ec51-4d4f-8801-bd08e78e9835 service nova] Acquiring lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.797546] env[65503]: DEBUG oslo_concurrency.lockutils [req-07b9dc53-11ff-49c9-97ee-d04bd0968484 req-eff98b5f-ec51-4d4f-8801-bd08e78e9835 service nova] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.797746] env[65503]: DEBUG oslo_concurrency.lockutils [req-07b9dc53-11ff-49c9-97ee-d04bd0968484 req-eff98b5f-ec51-4d4f-8801-bd08e78e9835 service nova] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.797856] env[65503]: DEBUG nova.compute.manager [req-07b9dc53-11ff-49c9-97ee-d04bd0968484 req-eff98b5f-ec51-4d4f-8801-bd08e78e9835 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] No waiting events found dispatching network-vif-plugged-c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1166.798075] env[65503]: WARNING nova.compute.manager [req-07b9dc53-11ff-49c9-97ee-d04bd0968484 req-eff98b5f-ec51-4d4f-8801-bd08e78e9835 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Received unexpected event network-vif-plugged-c7da6c50-239a-487b-91ca-3e82cb4d3794 for instance with vm_state building and task_state spawning. 
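[annotation] The recurring neutronclient deprecation warnings recommend openstacksdk for calls like the port create/update above. A rough sketch of the equivalent SDK call; the cloud name is a placeholder, and the network id is the one that appears in this log's network_info.

    # Rough openstacksdk equivalent of the port create that neutronclient is
    # performing above; 'devstack-admin' is a placeholder clouds.yaml entry.
    import openstack

    conn = openstack.connect(cloud='devstack-admin')

    port = conn.network.create_port(
        network_id='997076b2-693b-4d42-9279-626006ea5699',  # network id from this log
        name='example-port')
    print(port.id, port.fixed_ips)

    # Clean up the illustrative port.
    conn.network.delete_port(port, ignore_missing=True)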
[ 1166.853436] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ca0bf6e-6de0-44c9-8747-9b63972f9132 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "6f9a7c04-78dd-46b2-a42d-6ff218478f19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.986s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.946652] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: a197b590-1f74-4241-9579-2f2d3bb89a1d] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1166.968618] env[65503]: DEBUG oslo_vmware.api [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450849, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2255} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.968886] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1166.969133] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1166.969374] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1166.969697] env[65503]: INFO nova.compute.manager [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1166.969786] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1166.969972] env[65503]: DEBUG nova.compute.manager [-] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1166.970078] env[65503]: DEBUG nova.network.neutron [-] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1166.970315] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1166.970877] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1166.971152] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1167.024667] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1167.215332] env[65503]: DEBUG oslo_concurrency.lockutils [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.215669] env[65503]: DEBUG oslo_concurrency.lockutils [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.215937] env[65503]: DEBUG oslo_concurrency.lockutils [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.228552] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.228552] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock 
"refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1167.228958] env[65503]: DEBUG nova.network.neutron [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1167.248134] env[65503]: INFO nova.scheduler.client.report [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleted allocations for instance 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a [ 1167.450309] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 9dbaff4f-ab02-481b-b51f-b134021d277c] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1167.628063] env[65503]: INFO nova.compute.manager [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Rebuilding instance [ 1167.674220] env[65503]: DEBUG nova.compute.manager [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1167.675378] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f6ff94-d425-49f9-92fa-391c0e454978 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.733100] env[65503]: WARNING openstack [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1167.733557] env[65503]: WARNING openstack [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1167.757934] env[65503]: DEBUG oslo_concurrency.lockutils [None req-940a1a11-a534-4b27-88f0-bfc3a8ffe64b tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.915s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.780318] env[65503]: DEBUG nova.network.neutron [None req-2578141f-065f-4269-b499-a7506685eed4 
tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1167.799347] env[65503]: DEBUG nova.network.neutron [-] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1167.801700] env[65503]: WARNING openstack [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1167.802316] env[65503]: WARNING openstack [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1167.870915] env[65503]: WARNING neutronclient.v2_0.client [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
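[annotation] The "Waiting for function … _deallocate_network_with_retries to return" records (oslo_service/backend/_eventlet/loopingcall.py:437) come from oslo.service's looping-call helper, which re-runs a function until it signals completion. A minimal sketch of that pattern; the work function here is a stand-in, not Nova's deallocate logic.

    # Minimal sketch of the oslo.service looping-call retry pattern behind the
    # "Waiting for function ... to return" lines; the work function is a stand-in.
    from oslo_service import loopingcall

    attempts = {'n': 0}


    def _deallocate_with_retries():
        attempts['n'] += 1
        if attempts['n'] < 3:
            return  # not done yet; the looping call runs us again
        # Signal completion (and a return value) back to the waiter.
        raise loopingcall.LoopingCallDone(retvalue=attempts['n'])


    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=0.1).wait()
    print('finished after', result, 'attempts')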
[ 1167.871630] env[65503]: WARNING openstack [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1167.872092] env[65503]: WARNING openstack [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1167.953703] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 2ab1cd4b-f2c0-4264-8463-8127a733a1c5] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1167.964157] env[65503]: DEBUG nova.network.neutron [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updating instance_info_cache with network_info: [{"id": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "address": "fa:16:3e:23:5c:c4", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7da6c50-23", "ovs_interfaceid": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1168.310334] env[65503]: INFO nova.compute.manager [-] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Took 1.34 seconds to deallocate network for instance. 
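[annotation] The network_info blob in the "Updating instance_info_cache" record above is the JSON document Nova keeps in the instance info cache; the useful fields (port id, MAC, device name, fixed IPs) can be pulled out directly. A small sketch against a trimmed copy of that structure:

    # Extract port id, MAC, devname and fixed IPs from a network_info cache
    # entry like the one logged above (trimmed to the fields read here).
    import json

    network_info_json = '''
    [{"id": "c7da6c50-239a-487b-91ca-3e82cb4d3794",
      "address": "fa:16:3e:23:5c:c4",
      "network": {"id": "997076b2-693b-4d42-9279-626006ea5699",
                  "subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.3",
                                        "type": "fixed", "version": 4}]}]},
      "type": "ovs", "devname": "tapc7da6c50-23", "active": true}]
    '''

    for vif in json.loads(network_info_json):
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        print(vif['id'], vif['address'], vif['devname'], ips)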
[ 1168.362109] env[65503]: INFO nova.compute.manager [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Rescuing [ 1168.362399] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.362589] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.362728] env[65503]: DEBUG nova.network.neutron [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1168.457316] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 1e92795e-cf30-4175-9e31-c29278f3e9e0] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1168.467312] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.467695] env[65503]: DEBUG nova.compute.manager [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Instance network_info: |[{"id": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "address": "fa:16:3e:23:5c:c4", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7da6c50-23", "ovs_interfaceid": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2035}} [ 1168.468178] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:5c:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7da6c50-239a-487b-91ca-3e82cb4d3794', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1168.476586] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1168.476840] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1168.477110] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0712c44b-63a3-4b28-824f-3b93b941b5de {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.499747] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1168.499747] env[65503]: value = "task-4450852" [ 1168.499747] env[65503]: _type = "Task" [ 1168.499747] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.509302] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450852, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.691221] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1168.691457] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24d8f143-2af7-4aa4-9543-36115165bf1f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.701013] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for the task: (returnval){ [ 1168.701013] env[65503]: value = "task-4450854" [ 1168.701013] env[65503]: _type = "Task" [ 1168.701013] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.710769] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450854, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.818174] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.818174] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.818385] env[65503]: DEBUG nova.objects.instance [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lazy-loading 'resources' on Instance uuid ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1168.837886] env[65503]: DEBUG nova.compute.manager [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Received event network-changed-c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1168.838454] env[65503]: DEBUG nova.compute.manager [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Refreshing instance network info cache due to event network-changed-c7da6c50-239a-487b-91ca-3e82cb4d3794. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1168.838709] env[65503]: DEBUG oslo_concurrency.lockutils [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] Acquiring lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.838849] env[65503]: DEBUG oslo_concurrency.lockutils [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] Acquired lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.839060] env[65503]: DEBUG nova.network.neutron [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Refreshing network info cache for port c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1168.866460] env[65503]: WARNING neutronclient.v2_0.client [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1168.867455] env[65503]: WARNING openstack [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1168.867826] env[65503]: WARNING openstack [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1168.960594] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: f7d9d4f0-1777-4d0d-a5d2-e7e1bf18322d] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1169.007936] env[65503]: WARNING openstack [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1169.008395] env[65503]: WARNING openstack [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1169.018481] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450852, 'name': CreateVM_Task, 'duration_secs': 0.501767} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.018764] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1169.019208] env[65503]: WARNING neutronclient.v2_0.client [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1169.019629] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.019798] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.020116] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1169.020386] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec08cc2e-c05a-4e7d-8e7b-1aade3b02012 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.027422] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1169.027422] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210f6ac-b77e-d9cb-3134-b3c2bcf10a0e" [ 1169.027422] env[65503]: _type = "Task" [ 1169.027422] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.037075] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210f6ac-b77e-d9cb-3134-b3c2bcf10a0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.058621] env[65503]: WARNING neutronclient.v2_0.client [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1169.059354] env[65503]: WARNING openstack [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1169.059680] env[65503]: WARNING openstack [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1169.138937] env[65503]: DEBUG nova.network.neutron [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Updating instance_info_cache with network_info: [{"id": "e8d6afe7-1d1d-402a-a7f1-86cd2d14c559", "address": "fa:16:3e:72:c7:8d", "network": {"id": "3686d3c7-ea9f-4310-b1ef-785e7f12fc72", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1730162076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "da471ceaf0f348cd87e42d3c03fdfeb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d6afe7-1d", "ovs_interfaceid": "e8d6afe7-1d1d-402a-a7f1-86cd2d14c559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1169.211376] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450854, 'name': PowerOffVM_Task, 'duration_secs': 0.222485} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.211586] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1169.212335] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1169.212596] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48d9f6aa-64b2-4e09-8096-31c3607fae54 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.220566] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for the task: (returnval){ [ 1169.220566] env[65503]: value = "task-4450855" [ 1169.220566] env[65503]: _type = "Task" [ 1169.220566] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.231898] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1169.232050] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Volume detach. 
Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1169.232156] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870481', 'volume_id': '856eed9a-2608-4d0d-b7b3-52e61d723973', 'name': 'volume-856eed9a-2608-4d0d-b7b3-52e61d723973', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7b98ff40-1580-4175-adc5-66ca8977990a', 'attached_at': '', 'detached_at': '', 'volume_id': '856eed9a-2608-4d0d-b7b3-52e61d723973', 'serial': '856eed9a-2608-4d0d-b7b3-52e61d723973'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1169.232908] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c94322-7bff-47f7-af42-69bcf3e89492 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.252215] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d043210f-4908-4d37-9c81-c3f4dafc871a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.260320] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ece3b9c-c8a4-4075-9af6-7f474bb7514a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.280800] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c537410-b13d-468b-8dd1-e74838df7528 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.298845] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] The volume has not been displaced from its original location: [datastore2] volume-856eed9a-2608-4d0d-b7b3-52e61d723973/volume-856eed9a-2608-4d0d-b7b3-52e61d723973.vmdk. No consolidation needed. 
{{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1169.304410] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Reconfiguring VM instance instance-00000069 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1169.304812] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-726e6c2a-49e5-40a4-9634-d5c5590b2916 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.324667] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for the task: (returnval){ [ 1169.324667] env[65503]: value = "task-4450856" [ 1169.324667] env[65503]: _type = "Task" [ 1169.324667] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.336491] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450856, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.342275] env[65503]: WARNING neutronclient.v2_0.client [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1169.343078] env[65503]: WARNING openstack [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1169.343472] env[65503]: WARNING openstack [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1169.427904] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.427904] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.464307] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: ad85eef0-cef7-4900-b193-1737a6c2f17b] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1169.504575] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6786df1-30cc-41a3-a9fe-0a07a3bc45b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.508847] env[65503]: WARNING openstack [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1169.509234] env[65503]: WARNING openstack [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1169.523290] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e056a9e7-d0b1-47e9-b208-d40c1e327755 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.564295] env[65503]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32142ca6-3c60-4140-a172-371d76c8e0f9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.577055] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a641bda4-6eca-4374-beed-3ebe12bd104f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.581572] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5210f6ac-b77e-d9cb-3134-b3c2bcf10a0e, 'name': SearchDatastore_Task, 'duration_secs': 0.0105} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.581947] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.582265] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1169.582516] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.582649] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.582821] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1169.583456] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7334731-c95a-49f4-a951-f36f32a5bd05 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.595222] env[65503]: DEBUG nova.compute.provider_tree [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed in ProviderTree 
for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.600340] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1169.600340] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1169.600340] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c15ab62-768b-42d4-8df7-8c52298dcb7d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.606217] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1169.606217] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214dc80-da69-0065-1df2-9a5152bd81ad" [ 1169.606217] env[65503]: _type = "Task" [ 1169.606217] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.617727] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214dc80-da69-0065-1df2-9a5152bd81ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.619015] env[65503]: WARNING neutronclient.v2_0.client [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1169.619636] env[65503]: WARNING openstack [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1169.619981] env[65503]: WARNING openstack [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1169.641539] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Releasing lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.722116] env[65503]: DEBUG nova.network.neutron [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updated VIF entry in instance network info cache for port c7da6c50-239a-487b-91ca-3e82cb4d3794. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1169.722600] env[65503]: DEBUG nova.network.neutron [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updating instance_info_cache with network_info: [{"id": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "address": "fa:16:3e:23:5c:c4", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7da6c50-23", "ovs_interfaceid": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1169.836491] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450856, 'name': ReconfigVM_Task, 'duration_secs': 0.182243} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.836686] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Reconfigured VM instance instance-00000069 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1169.841300] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3556dcfd-b890-4ab8-a1be-25e3c9e47809 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.857223] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for the task: (returnval){ [ 1169.857223] env[65503]: value = "task-4450857" [ 1169.857223] env[65503]: _type = "Task" [ 1169.857223] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.866489] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450857, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.934487] env[65503]: DEBUG nova.compute.manager [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1169.967959] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 39266117-e82e-48ae-932a-be04b1a7351a] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1170.100260] env[65503]: DEBUG nova.scheduler.client.report [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1170.118506] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5214dc80-da69-0065-1df2-9a5152bd81ad, 'name': SearchDatastore_Task, 'duration_secs': 0.011116} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.119821] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ba059da-e9c5-4608-9ffc-5030b929f836 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.127394] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1170.127394] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52807cb8-fe79-486a-2674-56ac444593a8" [ 1170.127394] env[65503]: _type = "Task" [ 1170.127394] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.138223] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52807cb8-fe79-486a-2674-56ac444593a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.226051] env[65503]: DEBUG oslo_concurrency.lockutils [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] Releasing lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.226361] env[65503]: DEBUG nova.compute.manager [req-9fc5deec-9296-4643-95f6-191ec52743f6 req-ac329bc3-c02c-4b98-9217-5043519ac031 service nova] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Received event network-vif-deleted-6c742bed-a172-46d4-9ac0-cc3025ddff75 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1170.369062] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450857, 'name': ReconfigVM_Task, 'duration_secs': 0.324044} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.369510] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870481', 'volume_id': '856eed9a-2608-4d0d-b7b3-52e61d723973', 'name': 'volume-856eed9a-2608-4d0d-b7b3-52e61d723973', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7b98ff40-1580-4175-adc5-66ca8977990a', 'attached_at': '', 'detached_at': '', 'volume_id': '856eed9a-2608-4d0d-b7b3-52e61d723973', 'serial': '856eed9a-2608-4d0d-b7b3-52e61d723973'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1170.369893] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1170.370906] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d2ea15-206c-4c53-aa7c-24790ffcadb1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.378800] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1170.379102] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5fdcc594-e313-4589-9be2-45a87ed19ff9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.452498] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1170.452751] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1170.452943] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Deleting the datastore file [datastore2] 7b98ff40-1580-4175-adc5-66ca8977990a {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1170.453269] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5eddd12b-27f4-4c59-95d2-42b92bd5e1d0 {{(pid=65503) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.458353] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.461939] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for the task: (returnval){ [ 1170.461939] env[65503]: value = "task-4450859" [ 1170.461939] env[65503]: _type = "Task" [ 1170.461939] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.470752] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450859, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.471094] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.605263] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.787s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.607677] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.149s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.609271] env[65503]: INFO nova.compute.claims [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1170.624917] env[65503]: INFO nova.scheduler.client.report [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Deleted allocations for instance ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046 [ 1170.639579] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52807cb8-fe79-486a-2674-56ac444593a8, 'name': SearchDatastore_Task, 'duration_secs': 0.043919} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.639865] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.640127] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] fb2dddac-4ac0-498a-b972-e61255833ad0/fb2dddac-4ac0-498a-b972-e61255833ad0.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1170.640472] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca6d3300-b2f8-420c-91ca-6d8e524e56e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.648291] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1170.648291] env[65503]: value = "task-4450860" [ 1170.648291] env[65503]: _type = "Task" [ 1170.648291] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.658621] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450860, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.836970] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1170.837140] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870498', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'name': 'volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad', 'attached_at': '', 'detached_at': '', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'serial': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1170.838113] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c9533c-b715-41ff-842f-b3fa129879e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.856486] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac7d531-c65d-497e-bc81-bfce15b3cd43 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.884797] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe/volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1170.885217] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b57fad7b-5edd-43ab-a5e4-594fbc5902b0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.908406] env[65503]: DEBUG oslo_vmware.api [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1170.908406] env[65503]: value = "task-4450861" [ 1170.908406] env[65503]: _type = "Task" [ 1170.908406] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.920953] env[65503]: DEBUG oslo_vmware.api [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450861, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.972421] env[65503]: DEBUG oslo_vmware.api [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Task: {'id': task-4450859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080567} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.972815] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1170.973010] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1170.973190] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1171.033682] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Volume detach. Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1171.034120] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95546574-af3e-4df7-9128-337c50b64287 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.045784] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d49768-54ab-4e35-855b-b5d35f7fc603 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.081774] env[65503]: ERROR nova.compute.manager [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Failed to detach volume 856eed9a-2608-4d0d-b7b3-52e61d723973 from /dev/sda: nova.exception.InstanceNotFound: Instance 7b98ff40-1580-4175-adc5-66ca8977990a could not be found. 
[ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Traceback (most recent call last): [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 4224, in _do_rebuild_instance [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] self.driver.rebuild(**kwargs) [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/virt/driver.py", line 533, in rebuild [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] raise NotImplementedError() [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] NotImplementedError [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] During handling of the above exception, another exception occurred: [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Traceback (most recent call last): [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 3647, in _detach_root_volume [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] self.driver.detach_volume(context, old_connection_info, [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] return self._volumeops.detach_volume(connection_info, instance) [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] self._detach_volume_vmdk(connection_info, instance) [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] stable_ref.fetch_moref(session) [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] 
nova.exception.InstanceNotFound: Instance 7b98ff40-1580-4175-adc5-66ca8977990a could not be found. [ 1171.081774] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] [ 1171.135520] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11ba8e91-3c82-4697-b280-2791b7e954a3 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.854s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.170165] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450860, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510251} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.170699] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] fb2dddac-4ac0-498a-b972-e61255833ad0/fb2dddac-4ac0-498a-b972-e61255833ad0.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1171.172030] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1171.172030] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57e26c0a-8d67-4fc3-bc3f-5a40fafb2c26 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.181231] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1171.182038] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c6d1eb6-e7be-44ae-85e2-fbb396a05617 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.185677] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1171.185677] env[65503]: value = "task-4450862" [ 1171.185677] env[65503]: _type = "Task" [ 1171.185677] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.192155] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1171.192155] env[65503]: value = "task-4450863" [ 1171.192155] env[65503]: _type = "Task" [ 1171.192155] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.203035] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450862, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.210927] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450863, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.242031] env[65503]: DEBUG nova.compute.utils [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Build of instance 7b98ff40-1580-4175-adc5-66ca8977990a aborted: Failed to rebuild volume backed instance. {{(pid=65503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1171.244478] env[65503]: ERROR nova.compute.manager [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 7b98ff40-1580-4175-adc5-66ca8977990a aborted: Failed to rebuild volume backed instance. 
[ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Traceback (most recent call last): [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 4224, in _do_rebuild_instance [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] self.driver.rebuild(**kwargs) [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/virt/driver.py", line 533, in rebuild [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] raise NotImplementedError() [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] NotImplementedError [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] During handling of the above exception, another exception occurred: [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Traceback (most recent call last): [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 3682, in _rebuild_volume_backed_instance [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] self._detach_root_volume(context, instance, root_bdm) [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 3661, in _detach_root_volume [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] with excutils.save_and_reraise_exception(): [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] self.force_reraise() [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] raise self.value [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 3647, in _detach_root_volume [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] self.driver.detach_volume(context, old_connection_info, [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] return self._volumeops.detach_volume(connection_info, instance) [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] self._detach_volume_vmdk(connection_info, instance) [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] stable_ref.fetch_moref(session) [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] nova.exception.InstanceNotFound: Instance 7b98ff40-1580-4175-adc5-66ca8977990a could not be found. [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] During handling of the above exception, another exception occurred: [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Traceback (most recent call last): [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 11553, in _error_out_instance_on_exception [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] yield [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 3950, in rebuild_instance [ 1171.244478] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] self._do_rebuild_instance_with_claim( [ 1171.245732] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 4036, in _do_rebuild_instance_with_claim [ 1171.245732] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] self._do_rebuild_instance( [ 1171.245732] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 4228, in _do_rebuild_instance [ 1171.245732] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] self._rebuild_default_impl(**kwargs) [ 1171.245732] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 3805, in _rebuild_default_impl [ 1171.245732] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] 
self._rebuild_volume_backed_instance( [ 1171.245732] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] File "/opt/stack/nova/nova/compute/manager.py", line 3697, in _rebuild_volume_backed_instance [ 1171.245732] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] raise exception.BuildAbortException( [ 1171.245732] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] nova.exception.BuildAbortException: Build of instance 7b98ff40-1580-4175-adc5-66ca8977990a aborted: Failed to rebuild volume backed instance. [ 1171.245732] env[65503]: ERROR nova.compute.manager [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] [ 1171.421668] env[65503]: DEBUG oslo_vmware.api [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450861, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.671927] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "e37758cc-7287-4271-ad47-d711201d0add" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.672214] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "e37758cc-7287-4271-ad47-d711201d0add" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.672378] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "e37758cc-7287-4271-ad47-d711201d0add-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.672867] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "e37758cc-7287-4271-ad47-d711201d0add-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.673597] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "e37758cc-7287-4271-ad47-d711201d0add-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.675594] env[65503]: INFO nova.compute.manager [None 
req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Terminating instance [ 1171.699839] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450862, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097046} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.702645] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1171.703638] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5dfaff9-2dd8-42ad-bc56-d6d4c1baf304 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.709517] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450863, 'name': PowerOffVM_Task, 'duration_secs': 0.229641} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.710077] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1171.710979] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353439b5-9ccd-4100-8848-97f3ec16e8f6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.733417] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] fb2dddac-4ac0-498a-b972-e61255833ad0/fb2dddac-4ac0-498a-b972-e61255833ad0.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1171.734856] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0296665-c5c6-4011-89eb-0f3a0fbffb0b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.771486] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac97d811-eefd-46a5-bb5e-dcad53875308 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.776437] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 
tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1171.776437] env[65503]: value = "task-4450864" [ 1171.776437] env[65503]: _type = "Task" [ 1171.776437] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.786924] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde05257-1f6a-4b4d-97bf-9e9fc35dccdf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.793570] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450864, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.798837] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc02127-afa8-4a70-b438-5b1d83c45bc5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.575759] env[65503]: DEBUG nova.compute.manager [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1172.575952] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1172.579685] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1172.583496] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3428f9d4-b0d6-4163-8969-663e6f0c4bc3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.589159] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c222500-5282-49df-b8f4-7219867c7459 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.591510] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b852e0b1-4bdf-4fec-9d7a-6bf52e43df72 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.609576] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c9fa57-aad3-438b-bff4-c56760355ec6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.613249] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 
tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1172.613519] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450864, 'name': ReconfigVM_Task, 'duration_secs': 0.285218} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.613720] env[65503]: DEBUG oslo_vmware.api [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450861, 'name': ReconfigVM_Task, 'duration_secs': 1.55326} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.614889] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03523003-dfe8-490d-a4e4-955ddf26dc14 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.616677] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Reconfigured VM instance instance-0000006e to attach disk [datastore1] fb2dddac-4ac0-498a-b972-e61255833ad0/fb2dddac-4ac0-498a-b972-e61255833ad0.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1172.617371] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfigured VM instance instance-0000006a to attach disk [datastore1] volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe/volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1172.623508] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ecfe13ea-6bc3-4ace-86a8-b411dacb5e7d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.624935] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-855da937-f5cc-4d60-b027-ff15e04ea5df {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.634648] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1172.634648] env[65503]: value = "task-4450865" [ 1172.634648] env[65503]: _type = "Task" [ 1172.634648] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.643186] env[65503]: DEBUG nova.compute.provider_tree [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.649259] env[65503]: DEBUG oslo_vmware.api [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1172.649259] env[65503]: value = "task-4450866" [ 1172.649259] env[65503]: _type = "Task" [ 1172.649259] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.651916] env[65503]: DEBUG oslo_vmware.api [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1172.651916] env[65503]: value = "task-4450868" [ 1172.651916] env[65503]: _type = "Task" [ 1172.651916] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.652209] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1172.652209] env[65503]: value = "task-4450867" [ 1172.652209] env[65503]: _type = "Task" [ 1172.652209] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.666153] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1172.666403] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1172.666666] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.666854] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.667085] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1172.668065] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f4f1713-3f17-43d1-9ae8-aad93932197e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.681602] env[65503]: DEBUG oslo_vmware.api [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450868, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.682081] env[65503]: DEBUG oslo_vmware.api [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450866, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.682163] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450867, 'name': Rename_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.690912] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1172.691179] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1172.692049] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26bcb889-9528-4041-8c15-02ea2d89e364 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.698282] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1172.698282] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52661bab-bdd3-0613-1e3f-7feadc639ceb" [ 1172.698282] env[65503]: _type = "Task" [ 1172.698282] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.707233] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52661bab-bdd3-0613-1e3f-7feadc639ceb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.149645] env[65503]: DEBUG nova.scheduler.client.report [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1173.173140] env[65503]: DEBUG oslo_vmware.api [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450866, 'name': PowerOffVM_Task, 'duration_secs': 0.21607} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.173380] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450867, 'name': Rename_Task, 'duration_secs': 0.148629} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.176162] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1173.176343] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1173.176591] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1173.177152] env[65503]: DEBUG oslo_vmware.api [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450868, 'name': ReconfigVM_Task, 'duration_secs': 0.171417} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.177391] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-764cd753-539a-4506-aefc-8b2dfa8cdfc3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.178845] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d27b742-415b-4ce3-a2af-de47ad8ef4f9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.180320] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870498', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'name': 'volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad', 'attached_at': '', 'detached_at': '', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'serial': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1173.187713] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1173.187713] env[65503]: value = "task-4450870" [ 1173.187713] env[65503]: _type = "Task" [ 1173.187713] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.196888] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450870, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.207471] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52661bab-bdd3-0613-1e3f-7feadc639ceb, 'name': SearchDatastore_Task, 'duration_secs': 0.009815} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.208260] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5028a41a-3aee-4a5a-b74a-3ff9f9d69cab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.214476] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1173.214476] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bcd5b3-e9c3-0d5a-eba1-2b94f2a141b9" [ 1173.214476] env[65503]: _type = "Task" [ 1173.214476] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.222967] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bcd5b3-e9c3-0d5a-eba1-2b94f2a141b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.249124] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1173.249381] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1173.249568] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Deleting the datastore file [datastore1] e37758cc-7287-4271-ad47-d711201d0add {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1173.249851] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7268f986-e1c4-49ee-8ff2-7a8b629088c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.257095] env[65503]: DEBUG oslo_vmware.api [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for the task: (returnval){ [ 1173.257095] env[65503]: value = "task-4450871" [ 1173.257095] env[65503]: _type = "Task" [ 1173.257095] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.266403] env[65503]: DEBUG oslo_vmware.api [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450871, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.603151] env[65503]: DEBUG oslo_concurrency.lockutils [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.658388] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.051s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.658916] env[65503]: DEBUG nova.compute.manager [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1173.661768] env[65503]: DEBUG oslo_concurrency.lockutils [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.059s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.698129] env[65503]: DEBUG oslo_vmware.api [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450870, 'name': PowerOnVM_Task, 'duration_secs': 0.469924} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.698459] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1173.698663] env[65503]: INFO nova.compute.manager [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Took 7.45 seconds to spawn the instance on the hypervisor. [ 1173.698844] env[65503]: DEBUG nova.compute.manager [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1173.701970] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd0fde9-a8c1-4d76-bf63-99af6e9790a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.726280] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52bcd5b3-e9c3-0d5a-eba1-2b94f2a141b9, 'name': SearchDatastore_Task, 'duration_secs': 0.011926} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.730718] env[65503]: DEBUG oslo_concurrency.lockutils [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.731075] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 6f9a7c04-78dd-46b2-a42d-6ff218478f19/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. {{(pid=65503) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1173.731772] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3b91469-c5ec-4641-903d-7da4d866c2e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.740973] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1173.740973] env[65503]: value = "task-4450872" [ 1173.740973] env[65503]: _type = "Task" [ 1173.740973] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.753764] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450872, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.772055] env[65503]: DEBUG oslo_vmware.api [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Task: {'id': task-4450871, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145646} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.772404] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1173.772674] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1173.772920] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1173.773157] env[65503]: INFO nova.compute.manager [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] [instance: e37758cc-7287-4271-ad47-d711201d0add] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1173.773471] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1173.773704] env[65503]: DEBUG nova.compute.manager [-] [instance: e37758cc-7287-4271-ad47-d711201d0add] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1173.773830] env[65503]: DEBUG nova.network.neutron [-] [instance: e37758cc-7287-4271-ad47-d711201d0add] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1173.774174] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
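A minimal sketch of the polling pattern behind the repeated "Waiting for the task" / "progress is N%" / "completed successfully" lines above. This is not the oslo.vmware implementation; fetch_task_info is a hypothetical callable standing in for the vSphere task query.

    import time

    def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300):
        """Poll task_id until it completes; return its result or raise."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            # hypothetical: returns e.g. {'state': 'running', 'progress': 10}
            info = fetch_task_info(task_id)
            state = info.get("state")
            if state == "success":
                return info.get("result")
            if state == "error":
                raise RuntimeError(info.get("error", "task failed"))
            # corresponds to the DEBUG "progress is N%" lines in the log
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")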
[ 1173.774817] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1173.775179] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1173.814429] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e14a1a0-d328-4ff5-8bd4-e38ebd1ed250 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.823506] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1173.826391] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5a0118-1f9b-4f02-8d09-15f9bc90bfc3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.860876] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055b8320-aa8a-4a72-aca9-b707dd6bfb15 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.868874] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22eacc5f-a46f-4120-b93d-fdd8de05c113 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.883501] env[65503]: DEBUG nova.compute.provider_tree [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1174.086681] env[65503]: DEBUG nova.compute.manager [req-21f9571b-3203-493b-8dfa-4da99f87e4ce req-5a056b00-7776-47f1-8ccd-5a623e54ec05 service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] Received event network-vif-deleted-21904121-6624-489f-b851-76b0dfc15641 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1174.086818] env[65503]: INFO nova.compute.manager [req-21f9571b-3203-493b-8dfa-4da99f87e4ce req-5a056b00-7776-47f1-8ccd-5a623e54ec05 service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] Neutron deleted interface 21904121-6624-489f-b851-76b0dfc15641; detaching it from the instance and deleting it from the info cache [ 1174.087031] env[65503]: DEBUG nova.network.neutron [req-21f9571b-3203-493b-8dfa-4da99f87e4ce req-5a056b00-7776-47f1-8ccd-5a623e54ec05 service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1174.167035] env[65503]: DEBUG nova.compute.utils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 
tempest-ServerActionsTestOtherB-577581098-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1174.168144] env[65503]: DEBUG nova.compute.manager [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1174.168344] env[65503]: DEBUG nova.network.neutron [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1174.168666] env[65503]: WARNING neutronclient.v2_0.client [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1174.168967] env[65503]: WARNING neutronclient.v2_0.client [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1174.169640] env[65503]: WARNING openstack [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1174.170050] env[65503]: WARNING openstack [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1174.222447] env[65503]: INFO nova.compute.manager [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Took 12.30 seconds to build instance. [ 1174.232767] env[65503]: DEBUG nova.objects.instance [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lazy-loading 'flavor' on Instance uuid afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.254359] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450872, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501509} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.257596] env[65503]: DEBUG nova.policy [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d16022d9f8b43ba8e97191fdc2b1a8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3658921b747e4d78a2046b838cb36d26', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1174.259658] env[65503]: INFO nova.virt.vmwareapi.ds_util [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 6f9a7c04-78dd-46b2-a42d-6ff218478f19/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk. [ 1174.260927] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1b9f01-2124-4824-9664-81abbf219009 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.289065] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 6f9a7c04-78dd-46b2-a42d-6ff218478f19/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1174.291867] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ed6a173-b54c-4e99-b6bf-77daaf528a62 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.312495] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1174.312495] env[65503]: value = "task-4450873" [ 1174.312495] env[65503]: _type = "Task" [ 1174.312495] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.323701] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450873, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.390019] env[65503]: DEBUG nova.scheduler.client.report [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1174.561665] env[65503]: DEBUG nova.network.neutron [-] [instance: e37758cc-7287-4271-ad47-d711201d0add] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1174.590879] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d87ac1b-8960-417e-8ef7-a1b0b419008b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.602075] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08c4f26-b015-4f13-85d0-27e6c9e0009a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.636504] env[65503]: DEBUG nova.compute.manager [req-21f9571b-3203-493b-8dfa-4da99f87e4ce req-5a056b00-7776-47f1-8ccd-5a623e54ec05 service nova] [instance: e37758cc-7287-4271-ad47-d711201d0add] Detach interface failed, port_id=21904121-6624-489f-b851-76b0dfc15641, reason: Instance e37758cc-7287-4271-ad47-d711201d0add could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1174.670297] env[65503]: DEBUG nova.network.neutron [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Successfully created port: f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1174.679263] env[65503]: DEBUG nova.compute.manager [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1174.726181] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2578141f-065f-4269-b499-a7506685eed4 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.809s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.738572] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5fd1baeb-46c0-4f8c-82d4-88ea18b23cc5 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.530s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.825713] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450873, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.898042] env[65503]: DEBUG oslo_concurrency.lockutils [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.234s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.898042] env[65503]: INFO nova.compute.manager [None req-08cc9d22-cb8e-46dc-8e51-b96299fefc16 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Successfully reverted task state from rebuilding on failure for instance. 
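The "Successfully reverted task state from rebuilding" line follows the BuildAbortException traceback earlier in this section, where a NotImplementedError from the driver's rebuild() and an InstanceNotFound during root-volume detach are chained into the final abort. The sketch below reproduces only that chaining pattern, using the real oslo_utils.excutils.save_and_reraise_exception context manager; the exception classes are illustrative stand-ins, not Nova's code.

    from oslo_utils import excutils

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound (illustrative only)."""

    class BuildAbortException(Exception):
        """Stand-in for nova.exception.BuildAbortException (illustrative only)."""

    def _detach_root_volume():
        try:
            # The driver can no longer find the backing VM.
            raise InstanceNotFound("instance could not be found")
        except Exception:
            # Re-raises the original error when the block exits, after cleanup.
            with excutils.save_and_reraise_exception():
                pass  # cleanup / logging would go here

    def _rebuild_volume_backed_instance():
        try:
            _detach_root_volume()
        except Exception:
            # Converted into a build abort, which is what drives vm_state to
            # ERROR and the subsequent revert of the rebuilding task state.
            raise BuildAbortException("Failed to rebuild volume backed instance.")

    try:
        _rebuild_volume_backed_instance()
    except BuildAbortException as exc:
        print(f"aborted: {exc}")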
[ 1175.005463] env[65503]: DEBUG oslo_concurrency.lockutils [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Acquiring lock "7b98ff40-1580-4175-adc5-66ca8977990a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.005729] env[65503]: DEBUG oslo_concurrency.lockutils [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "7b98ff40-1580-4175-adc5-66ca8977990a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.006011] env[65503]: DEBUG oslo_concurrency.lockutils [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Acquiring lock "7b98ff40-1580-4175-adc5-66ca8977990a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.006218] env[65503]: DEBUG oslo_concurrency.lockutils [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "7b98ff40-1580-4175-adc5-66ca8977990a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.006382] env[65503]: DEBUG oslo_concurrency.lockutils [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "7b98ff40-1580-4175-adc5-66ca8977990a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.008721] env[65503]: INFO nova.compute.manager [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Terminating instance [ 1175.031135] env[65503]: INFO nova.compute.manager [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Rebuilding instance [ 1175.066450] env[65503]: INFO nova.compute.manager [-] [instance: e37758cc-7287-4271-ad47-d711201d0add] Took 1.29 seconds to deallocate network for instance. 
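The Acquiring/acquired/released lines around do_terminate_instance above follow the usual oslo.concurrency pattern: a named in-process lock keyed on the instance UUID serializes work on that instance, and lockutils logs the wait and hold times. A rough sketch of that usage, assuming only the stock lockutils API (this is not the Nova code itself, and do_terminate is a hypothetical helper):

    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        # Decorator form would be @lockutils.synchronized(instance_uuid); the context
        # manager below produces the same acquired/released accounting seen in the log.
        with lockutils.lock(instance_uuid):
            do_terminate(instance_uuid)      # hypothetical helper, runs under the lock

    def do_terminate(instance_uuid):
        print('terminating', instance_uuid)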
[ 1175.085718] env[65503]: DEBUG nova.compute.manager [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1175.087160] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63891801-9db5-4fbe-8272-c584b961141b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.324500] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450873, 'name': ReconfigVM_Task, 'duration_secs': 0.563545} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.325461] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 6f9a7c04-78dd-46b2-a42d-6ff218478f19/d68ffece-ab91-4610-b535-fa1fb25ade93-rescue.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1175.325682] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c6ac1b-bcb4-42f7-b2dc-2180265f5ca5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.351455] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7006c691-02d1-4c93-be37-b9f263001c5f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.369501] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1175.369501] env[65503]: value = "task-4450874" [ 1175.369501] env[65503]: _type = "Task" [ 1175.369501] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.378834] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450874, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.440213] env[65503]: DEBUG nova.compute.manager [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Received event network-changed-c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1175.440213] env[65503]: DEBUG nova.compute.manager [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Refreshing instance network info cache due to event network-changed-c7da6c50-239a-487b-91ca-3e82cb4d3794. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1175.440213] env[65503]: DEBUG oslo_concurrency.lockutils [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] Acquiring lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.440213] env[65503]: DEBUG oslo_concurrency.lockutils [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] Acquired lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.440213] env[65503]: DEBUG nova.network.neutron [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Refreshing network info cache for port c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1175.514020] env[65503]: DEBUG nova.compute.manager [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1175.514139] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9f8c922-39c3-4c7c-a942-32bb71cb0944 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.524553] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3b6568-3c1f-4f14-b963-8d1feff9817f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.555144] env[65503]: WARNING nova.virt.vmwareapi.driver [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 7b98ff40-1580-4175-adc5-66ca8977990a could not be found. 
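The recurring "Task: {'id': task-..., 'name': ReconfigVM_Task} progress is N%" entries are oslo.vmware polling a vCenter task object until it completes. The loop below is only a schematic of that poll-until-done pattern with a made-up poll_task() callback, not the library's implementation:

    import time

    def wait_for_task(poll_task, task_id, interval=0.5):
        """Poll until the task succeeds; poll_task() is a hypothetical callback that
        returns (state, progress), mirroring the 'progress is N%' log entries."""
        while True:
            state, progress = poll_task(task_id)
            print(f"Task {task_id} progress is {progress}%")
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError(f'Task {task_id} failed')
            time.sleep(interval)

    # Example with a fake poller that completes on the third poll.
    states = iter([('running', 6), ('running', 99), ('success', 100)])
    wait_for_task(lambda _tid: next(states), 'task-4450874', interval=0)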
[ 1175.555370] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1175.555695] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc1b3e2d-1893-4c25-9822-df040c9ced85 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.564768] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98fcd4a3-5af6-400e-9663-949a27774157 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.575872] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.576150] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.576323] env[65503]: DEBUG nova.objects.instance [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lazy-loading 'resources' on Instance uuid e37758cc-7287-4271-ad47-d711201d0add {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1175.598684] env[65503]: WARNING nova.virt.vmwareapi.vmops [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7b98ff40-1580-4175-adc5-66ca8977990a could not be found. [ 1175.598891] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1175.599080] env[65503]: INFO nova.compute.manager [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Took 0.09 seconds to destroy the instance on the hypervisor. [ 1175.599330] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1175.602104] env[65503]: DEBUG nova.compute.manager [-] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1175.602205] env[65503]: DEBUG nova.network.neutron [-] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1175.602461] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1175.602979] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1175.603255] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1175.645466] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1175.690525] env[65503]: DEBUG nova.compute.manager [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1175.719178] env[65503]: DEBUG nova.virt.hardware [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1175.719665] env[65503]: DEBUG nova.virt.hardware [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1175.719665] env[65503]: DEBUG nova.virt.hardware [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1175.719760] env[65503]: DEBUG nova.virt.hardware [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1175.720068] env[65503]: DEBUG nova.virt.hardware [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1175.720068] env[65503]: DEBUG nova.virt.hardware [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1175.720247] env[65503]: DEBUG nova.virt.hardware [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1175.720394] env[65503]: DEBUG nova.virt.hardware [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1175.720551] env[65503]: DEBUG 
nova.virt.hardware [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1175.720704] env[65503]: DEBUG nova.virt.hardware [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1175.720895] env[65503]: DEBUG nova.virt.hardware [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1175.721765] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f381b63b-7821-476b-9fc6-448ab2c1e43b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.732909] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c98ba0f-8285-4a6c-bb29-39f775e53ef0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.880054] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450874, 'name': ReconfigVM_Task, 'duration_secs': 0.153328} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.880198] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1175.880416] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12f898ec-c336-46aa-a63d-be5b070279ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.887711] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1175.887711] env[65503]: value = "task-4450875" [ 1175.887711] env[65503]: _type = "Task" [ 1175.887711] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.896048] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450875, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.943311] env[65503]: WARNING neutronclient.v2_0.client [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1175.943769] env[65503]: WARNING openstack [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1175.944414] env[65503]: WARNING openstack [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1176.111663] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1176.111814] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9e51886-580c-44fb-9886-63cde41f8d2c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.119781] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1176.119781] env[65503]: value = "task-4450876" [ 1176.119781] env[65503]: _type = "Task" [ 1176.119781] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.136838] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450876, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.227759] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18d6d75-3b09-44a3-8152-a257573fe9cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.241545] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908f1f84-e111-4f6f-b0a5-873e2050f455 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.282825] env[65503]: DEBUG nova.network.neutron [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Successfully updated port: f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1176.285064] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472d6aef-ec15-4ada-9a4b-c14b948f49b7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.297305] env[65503]: WARNING openstack [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1176.297815] env[65503]: WARNING openstack [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1176.306744] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2331724e-f23b-4611-a07a-9854134d5cdc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.325500] env[65503]: DEBUG nova.compute.provider_tree [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1176.344579] env[65503]: DEBUG nova.compute.manager [req-22751502-06f9-4e19-a5cd-a98c269fc04c req-99436354-3073-4387-b526-a89428e3da90 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Received event network-vif-plugged-f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1176.344886] env[65503]: DEBUG oslo_concurrency.lockutils [req-22751502-06f9-4e19-a5cd-a98c269fc04c req-99436354-3073-4387-b526-a89428e3da90 service nova] Acquiring lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.345188] env[65503]: DEBUG oslo_concurrency.lockutils [req-22751502-06f9-4e19-a5cd-a98c269fc04c req-99436354-3073-4387-b526-a89428e3da90 service nova] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.345408] env[65503]: DEBUG oslo_concurrency.lockutils [req-22751502-06f9-4e19-a5cd-a98c269fc04c req-99436354-3073-4387-b526-a89428e3da90 service nova] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.345609] env[65503]: DEBUG nova.compute.manager [req-22751502-06f9-4e19-a5cd-a98c269fc04c req-99436354-3073-4387-b526-a89428e3da90 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] No waiting events found dispatching network-vif-plugged-f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1176.345814] env[65503]: WARNING nova.compute.manager [req-22751502-06f9-4e19-a5cd-a98c269fc04c req-99436354-3073-4387-b526-a89428e3da90 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Received unexpected event network-vif-plugged-f896d088-0ab2-44cc-a26c-6593c073467c for instance with vm_state building and task_state spawning. [ 1176.372482] env[65503]: WARNING neutronclient.v2_0.client [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1176.372856] env[65503]: WARNING openstack [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1176.373231] env[65503]: WARNING openstack [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1176.400409] env[65503]: DEBUG oslo_vmware.api [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450875, 'name': PowerOnVM_Task, 'duration_secs': 0.476622} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.401037] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1176.404902] env[65503]: DEBUG nova.compute.manager [None req-dfdf8114-791c-4383-bd64-0dc32e95ed1c tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1176.405996] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121bc879-86a6-40ee-9b58-f5aac4bc473c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.491064] env[65503]: DEBUG nova.network.neutron [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updated VIF entry in instance network info cache for port c7da6c50-239a-487b-91ca-3e82cb4d3794. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1176.491064] env[65503]: DEBUG nova.network.neutron [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updating instance_info_cache with network_info: [{"id": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "address": "fa:16:3e:23:5c:c4", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7da6c50-23", "ovs_interfaceid": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1176.571884] env[65503]: DEBUG nova.network.neutron [-] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1176.639095] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450876, 'name': 
PowerOffVM_Task, 'duration_secs': 0.249473} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.639806] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1176.698817] env[65503]: INFO nova.compute.manager [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Detaching volume 94222922-f7c9-48bc-96b8-a28f0f9ddbbe [ 1176.733624] env[65503]: INFO nova.virt.block_device [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Attempting to driver detach volume 94222922-f7c9-48bc-96b8-a28f0f9ddbbe from mountpoint /dev/sdb [ 1176.733863] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Volume detach. Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1176.734077] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870498', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'name': 'volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad', 'attached_at': '', 'detached_at': '', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'serial': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1176.735090] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a6898b-eef1-4568-be5c-b2f271f6860a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.758090] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b11608-3951-4223-bb0c-d91e0a6e5b48 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.766291] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a2e243-ce92-4bfa-97ec-cdc874bc3652 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.787817] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c50e6b8-8424-49c7-9fc5-4c01f0620495 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1176.790762] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.790932] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.791119] env[65503]: DEBUG nova.network.neutron [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1176.805950] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The volume has not been displaced from its original location: [datastore1] volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe/volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe.vmdk. No consolidation needed. {{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1176.811248] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1176.812292] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-366fdbb9-8ce4-4b4c-b8fc-ea2ebb9717af {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.829200] env[65503]: DEBUG nova.scheduler.client.report [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1176.834418] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1176.834418] env[65503]: value = "task-4450877" [ 1176.834418] env[65503]: _type = "Task" [ 1176.834418] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.847127] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450877, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.993347] env[65503]: DEBUG oslo_concurrency.lockutils [req-c4479ddc-e273-49a4-9681-1689350374cd req-ad5199dd-ae40-498c-9b0e-916b5e983955 service nova] Releasing lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.075645] env[65503]: INFO nova.compute.manager [-] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Took 1.47 seconds to deallocate network for instance. [ 1177.296464] env[65503]: WARNING openstack [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1177.296691] env[65503]: WARNING openstack [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1177.335346] env[65503]: DEBUG nova.network.neutron [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1177.343786] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.767s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.366416] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450877, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.379059] env[65503]: WARNING openstack [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1177.379903] env[65503]: WARNING openstack [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1177.437887] env[65503]: INFO nova.scheduler.client.report [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Deleted allocations for instance e37758cc-7287-4271-ad47-d711201d0add [ 1177.562829] env[65503]: WARNING neutronclient.v2_0.client [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1177.563768] env[65503]: WARNING openstack [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1177.564384] env[65503]: WARNING openstack [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1177.577362] env[65503]: DEBUG nova.compute.manager [req-9863b1b5-7b6c-4138-87f9-7ddf4d6c0faa req-32a6a3af-adc3-43f4-bb81-d9466ac501de service nova] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Received event network-vif-deleted-816cbefe-fa43-42dd-8737-9679d9a453e8 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1177.620141] env[65503]: INFO nova.compute.manager [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Took 0.54 seconds to detach 1 volumes for instance. 
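Further up, the nova.virt.hardware entries walk from the flavor/image limits to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" and "Got 1 possible topologies". That enumeration amounts to factoring the vCPU count under per-dimension caps; the snippet below is an illustrative reconstruction of the idea, not Nova's actual hardware.py logic:

    # Illustrative reconstruction: all (sockets, cores, threads) whose product equals the
    # vCPU count, bounded by the 65536-per-dimension limits shown in the log.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # -> [(1, 1, 1)], matching the single topology logged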
[ 1177.627546] env[65503]: DEBUG nova.compute.manager [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Deleting volume: 856eed9a-2608-4d0d-b7b3-52e61d723973 {{(pid=65503) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3320}} [ 1177.684482] env[65503]: DEBUG nova.network.neutron [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updating instance_info_cache with network_info: [{"id": "f896d088-0ab2-44cc-a26c-6593c073467c", "address": "fa:16:3e:79:ba:09", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf896d088-0a", "ovs_interfaceid": "f896d088-0ab2-44cc-a26c-6593c073467c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1177.863698] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450877, 'name': ReconfigVM_Task, 'duration_secs': 0.592279} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.863698] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1177.868187] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31103c68-04b1-4995-8537-5ff6922504de {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.885839] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1177.885839] env[65503]: value = "task-4450879" [ 1177.885839] env[65503]: _type = "Task" [ 1177.885839] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.897048] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450879, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.945576] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b1a9f2e-17e4-4e1e-91c6-85abde0b4a27 tempest-ServerRescueNegativeTestJSON-1989104803 tempest-ServerRescueNegativeTestJSON-1989104803-project-member] Lock "e37758cc-7287-4271-ad47-d711201d0add" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.273s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.189760] env[65503]: DEBUG oslo_concurrency.lockutils [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.189760] env[65503]: DEBUG oslo_concurrency.lockutils [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.189760] env[65503]: DEBUG nova.objects.instance [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lazy-loading 'resources' on Instance uuid 7b98ff40-1580-4175-adc5-66ca8977990a {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1178.190445] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.190866] env[65503]: DEBUG nova.compute.manager [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Instance network_info: |[{"id": "f896d088-0ab2-44cc-a26c-6593c073467c", "address": "fa:16:3e:79:ba:09", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf896d088-0a", "ovs_interfaceid": "f896d088-0ab2-44cc-a26c-6593c073467c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1178.192328] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:ba:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6966f473-59ac-49bb-9b7a-22c61f4e61e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f896d088-0ab2-44cc-a26c-6593c073467c', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1178.204565] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1178.206075] env[65503]: INFO nova.compute.manager [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Unrescuing [ 1178.206480] env[65503]: DEBUG oslo_concurrency.lockutils [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.206737] env[65503]: DEBUG oslo_concurrency.lockutils [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquired lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.207040] env[65503]: DEBUG nova.network.neutron [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1178.210112] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1178.213671] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fee6917-6d6f-422b-8fa4-9d72b30920db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.236067] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 
1178.236067] env[65503]: value = "task-4450880" [ 1178.236067] env[65503]: _type = "Task" [ 1178.236067] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.253212] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450880, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.400848] env[65503]: DEBUG nova.compute.manager [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Received event network-changed-f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1178.400848] env[65503]: DEBUG nova.compute.manager [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Refreshing instance network info cache due to event network-changed-f896d088-0ab2-44cc-a26c-6593c073467c. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1178.400952] env[65503]: DEBUG oslo_concurrency.lockutils [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] Acquiring lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.401056] env[65503]: DEBUG oslo_concurrency.lockutils [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] Acquired lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.401239] env[65503]: DEBUG nova.network.neutron [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Refreshing network info cache for port f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1178.409847] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450879, 'name': ReconfigVM_Task, 'duration_secs': 0.266896} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.410200] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870498', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'name': 'volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad', 'attached_at': '', 'detached_at': '', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'serial': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1178.711364] env[65503]: WARNING neutronclient.v2_0.client [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1178.712104] env[65503]: WARNING openstack [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1178.712434] env[65503]: WARNING openstack [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1178.750863] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450880, 'name': CreateVM_Task, 'duration_secs': 0.374222} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.750863] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1178.751531] env[65503]: WARNING neutronclient.v2_0.client [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
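The ReconfigVM_Task and CreateVM_Task entries above are all driven by the same poll-until-done loop in oslo_vmware.api: wait_for_task (api.py:397) logs the "Waiting for the task ... to complete" lines, and _poll_task (api.py:434/444) logs the periodic progress and completion lines. The sketch below is only a simplified illustration of that pattern, not the actual oslo.vmware code; `get_task_info` is a hypothetical stand-in for the PropertyCollector reads the real library performs.

```python
import time

POLL_INTERVAL = 0.5  # seconds; illustrative only -- the real interval is configurable


def wait_for_task(get_task_info, task_ref):
    """Poll a vCenter task (e.g. task-4450880 / CreateVM_Task) until it finishes.

    `get_task_info` is a hypothetical callable returning an object with
    .state ("queued" / "running" / "success" / "error"), .result and .error,
    loosely mirroring the vSphere TaskInfo structure.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info.result            # logged above as "completed successfully"
        if info.state == "error":
            raise RuntimeError(info.error)
        # queued/running: each iteration shows up in the log as
        # "Task: {...} progress is N%."
        time.sleep(POLL_INTERVAL)
```

Each "Task: {...} progress is N%." record in this section corresponds to one iteration of a loop of this shape.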
[ 1178.752120] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.752373] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.752803] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1178.753207] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-537207f6-efa8-4424-b3b5-af8c139bee62 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.762722] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1178.762722] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5259ea93-2c14-9003-6ae8-13d2fdb08663" [ 1178.762722] env[65503]: _type = "Task" [ 1178.762722] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.774221] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5259ea93-2c14-9003-6ae8-13d2fdb08663, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.837313] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4cc5395-ee90-4f22-beb9-10b2087f843c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.850024] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a566ba-83ae-4324-bb5c-f013472cf17c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.890707] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0da2adf-cfdb-450e-a550-c0ff0c3051fe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.901050] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1b7f26-db71-451d-aa70-d199f6c681e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.906518] env[65503]: WARNING neutronclient.v2_0.client [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1178.907364] env[65503]: WARNING openstack [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1178.907881] env[65503]: WARNING openstack [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1178.926697] env[65503]: DEBUG nova.compute.provider_tree [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1178.979665] env[65503]: WARNING openstack [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1178.980187] env[65503]: WARNING openstack [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group 
[barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1179.081403] env[65503]: WARNING neutronclient.v2_0.client [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1179.082096] env[65503]: WARNING openstack [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1179.082446] env[65503]: WARNING openstack [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1179.095344] env[65503]: WARNING openstack [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1179.095731] env[65503]: WARNING openstack [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1179.200956] env[65503]: DEBUG nova.network.neutron [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Updating instance_info_cache with network_info: [{"id": "e8d6afe7-1d1d-402a-a7f1-86cd2d14c559", "address": "fa:16:3e:72:c7:8d", "network": {"id": "3686d3c7-ea9f-4310-b1ef-785e7f12fc72", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1730162076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "da471ceaf0f348cd87e42d3c03fdfeb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d6afe7-1d", "ovs_interfaceid": "e8d6afe7-1d1d-402a-a7f1-86cd2d14c559", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1179.227287] env[65503]: WARNING neutronclient.v2_0.client [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1179.227993] env[65503]: WARNING openstack [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1179.228199] env[65503]: WARNING openstack [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1179.278720] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5259ea93-2c14-9003-6ae8-13d2fdb08663, 'name': SearchDatastore_Task, 'duration_secs': 0.010261} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.283457] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.283718] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1179.283964] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.284475] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.284683] env[65503]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1179.285218] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78d297b9-2d21-4fe3-86c6-6d9947d287f1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.295441] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1179.295661] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1179.296504] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5883099-6849-45d2-9572-a9ddfbd72899 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.303426] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1179.303426] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba5830-759a-7622-0883-78d24ea34354" [ 1179.303426] env[65503]: _type = "Task" [ 1179.303426] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.313151] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba5830-759a-7622-0883-78d24ea34354, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.347867] env[65503]: DEBUG nova.network.neutron [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updated VIF entry in instance network info cache for port f896d088-0ab2-44cc-a26c-6593c073467c. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1179.348518] env[65503]: DEBUG nova.network.neutron [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updating instance_info_cache with network_info: [{"id": "f896d088-0ab2-44cc-a26c-6593c073467c", "address": "fa:16:3e:79:ba:09", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf896d088-0a", "ovs_interfaceid": "f896d088-0ab2-44cc-a26c-6593c073467c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1179.430424] env[65503]: DEBUG nova.scheduler.client.report [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1179.489442] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1179.489966] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc9f30d5-b36b-4dc1-8c28-ea2139bca557 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.499133] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1179.499133] env[65503]: value = "task-4450881" [ 1179.499133] env[65503]: _type = "Task" [ 1179.499133] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.508314] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450881, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.704542] env[65503]: DEBUG oslo_concurrency.lockutils [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Releasing lock "refresh_cache-6f9a7c04-78dd-46b2-a42d-6ff218478f19" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.705465] env[65503]: DEBUG nova.objects.instance [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lazy-loading 'flavor' on Instance uuid 6f9a7c04-78dd-46b2-a42d-6ff218478f19 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.813987] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ba5830-759a-7622-0883-78d24ea34354, 'name': SearchDatastore_Task, 'duration_secs': 0.011221} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.815295] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b7059b0-6340-449c-9a87-8a259ab6acd9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.821642] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1179.821642] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cc4c5a-cfb5-00f3-b2bd-93360e0bd7e4" [ 1179.821642] env[65503]: _type = "Task" [ 1179.821642] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.831223] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cc4c5a-cfb5-00f3-b2bd-93360e0bd7e4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.851563] env[65503]: DEBUG oslo_concurrency.lockutils [req-87684be7-1d9c-4489-91ff-64703ad7c1dc req-129ecd71-496a-4aac-8cbb-52d3d41db9d0 service nova] Releasing lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.936442] env[65503]: DEBUG oslo_concurrency.lockutils [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.748s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.011369] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1180.011655] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Volume detach. Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1180.011954] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870498', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'name': 'volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad', 'attached_at': '', 'detached_at': '', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'serial': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1180.012838] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b4ce0e-8d7a-4c01-8657-06dceec32473 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.037474] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4d367f-3a86-4ecd-a75d-e06ca9722381 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.046161] env[65503]: WARNING nova.virt.vmwareapi.driver [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1180.047084] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 
tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1180.047391] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8542960e-7b5d-4069-afcf-bbf7913b3c86 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.056125] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1180.056433] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1bbf2ff-fc35-4dc7-a294-c9a05ba05275 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.150117] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1180.150292] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1180.150457] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleting the datastore file [datastore2] afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1180.150872] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33a4bb56-4ea2-46c4-a002-108b64e1c454 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.160038] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1180.160038] env[65503]: value = "task-4450883" [ 1180.160038] env[65503]: _type = "Task" [ 1180.160038] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.170099] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450883, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.211495] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10dc7d7-a874-4b8b-b3b7-373d6a0c5b9a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.237984] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1180.238385] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91bd1d68-18bc-4645-80bc-c8d1f409565b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.246602] env[65503]: DEBUG oslo_vmware.api [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1180.246602] env[65503]: value = "task-4450884" [ 1180.246602] env[65503]: _type = "Task" [ 1180.246602] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.258197] env[65503]: DEBUG oslo_vmware.api [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450884, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.333204] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cc4c5a-cfb5-00f3-b2bd-93360e0bd7e4, 'name': SearchDatastore_Task, 'duration_secs': 0.020485} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.333516] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.333975] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17/6d11d79b-b11f-4a31-a4e3-aa5b3346ae17.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1180.334152] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0975e78e-fecd-4b49-9d45-04f8967805fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.342042] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1180.342042] env[65503]: value = "task-4450885" [ 1180.342042] env[65503]: _type = "Task" [ 1180.342042] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.352661] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450885, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.457368] env[65503]: DEBUG oslo_concurrency.lockutils [None req-441589a9-b883-48d8-bcc8-7464d32a7e06 tempest-ServerActionsV293TestJSON-2086592069 tempest-ServerActionsV293TestJSON-2086592069-project-member] Lock "7b98ff40-1580-4175-adc5-66ca8977990a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.451s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.672626] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450883, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143403} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.672816] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1180.672863] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1180.673034] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1180.758463] env[65503]: DEBUG oslo_vmware.api [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450884, 'name': PowerOffVM_Task, 'duration_secs': 0.237613} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.758769] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1180.764107] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Reconfiguring VM instance instance-0000006d to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1180.764836] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d5d4fa8-30d2-47bc-917a-7c6f6b069e45 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.787071] env[65503]: DEBUG oslo_vmware.api [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1180.787071] env[65503]: value = "task-4450886" [ 1180.787071] env[65503]: _type = "Task" [ 1180.787071] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.798571] env[65503]: DEBUG oslo_vmware.api [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450886, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.852248] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450885, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48568} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.852528] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17/6d11d79b-b11f-4a31-a4e3-aa5b3346ae17.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1180.852740] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1180.853011] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fda4c72-2eee-4121-a24b-73ae44f2edc1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.861438] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1180.861438] env[65503]: value = "task-4450887" [ 1180.861438] env[65503]: _type = "Task" [ 1180.861438] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.871421] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450887, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.178376] env[65503]: INFO nova.virt.block_device [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Booting with volume 94222922-f7c9-48bc-96b8-a28f0f9ddbbe at /dev/sdb [ 1181.212727] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c074874-e55c-482e-a997-c069daf29f21 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.222730] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f6fa29-d6f1-4ee4-a715-fc46d96dc933 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.254511] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15e79b22-1fc0-40de-913e-fb476b1cb5d9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.265263] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834faae4-df17-4034-b390-4c952263243f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.303954] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6320d04-00d0-4e8c-95f1-4110527d24c8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.310239] env[65503]: DEBUG oslo_vmware.api [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450886, 'name': ReconfigVM_Task, 'duration_secs': 0.221621} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.311210] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Reconfigured VM instance instance-0000006d to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1181.311428] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1181.311719] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a9358d95-8d1d-4c14-ac63-f751c9ba7b42 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.317097] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2f9fc3-8fe9-4ffd-a5be-7f3c92ce7bca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.321822] env[65503]: DEBUG oslo_vmware.api [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1181.321822] env[65503]: value = "task-4450888" [ 1181.321822] env[65503]: _type = "Task" [ 1181.321822] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.330598] env[65503]: DEBUG oslo_vmware.api [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450888, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.334456] env[65503]: DEBUG nova.virt.block_device [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Updating existing volume attachment record: c3af7079-8ca4-4b10-81fd-5fa8e9193454 {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1181.372471] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450887, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068051} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.372993] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1181.373651] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45570ae-a20c-4c1a-a648-4e76aeb8ab18 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.399058] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17/6d11d79b-b11f-4a31-a4e3-aa5b3346ae17.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1181.399426] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97e94559-f5d9-4d9d-9028-bc5c2d459b24 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.420225] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1181.420225] env[65503]: value = "task-4450889" [ 1181.420225] env[65503]: _type = "Task" [ 1181.420225] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.429506] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450889, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.833177] env[65503]: DEBUG oslo_vmware.api [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450888, 'name': PowerOnVM_Task, 'duration_secs': 0.396616} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.833325] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1181.833536] env[65503]: DEBUG nova.compute.manager [None req-72e01b1b-80db-4c29-991c-0dc70e1d1541 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1181.834698] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2127ad1e-0046-4c94-8f99-5322607eac89 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.932323] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450889, 'name': ReconfigVM_Task, 'duration_secs': 0.291704} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.932644] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17/6d11d79b-b11f-4a31-a4e3-aa5b3346ae17.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1181.933342] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d70f8c10-1fda-4977-a79f-bbf60d026fff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.942078] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1181.942078] env[65503]: value = "task-4450890" [ 1181.942078] env[65503]: _type = "Task" [ 1181.942078] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.952013] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450890, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.455648] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450890, 'name': Rename_Task, 'duration_secs': 0.181608} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.456152] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1182.456409] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a2a896f6-f3b1-423f-8b3b-a7ecf0694042 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.465137] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1182.465137] env[65503]: value = "task-4450891" [ 1182.465137] env[65503]: _type = "Task" [ 1182.465137] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.474916] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450891, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.957127] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "6f9a7c04-78dd-46b2-a42d-6ff218478f19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.957493] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "6f9a7c04-78dd-46b2-a42d-6ff218478f19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.957736] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "6f9a7c04-78dd-46b2-a42d-6ff218478f19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.957952] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "6f9a7c04-78dd-46b2-a42d-6ff218478f19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.958148] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 
tempest-ServerRescueTestJSON-1124687769-project-member] Lock "6f9a7c04-78dd-46b2-a42d-6ff218478f19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.960417] env[65503]: INFO nova.compute.manager [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Terminating instance [ 1182.977654] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450891, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.459913] env[65503]: DEBUG nova.virt.hardware [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1183.460266] env[65503]: DEBUG nova.virt.hardware [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1183.460452] env[65503]: DEBUG nova.virt.hardware [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1183.460696] env[65503]: DEBUG nova.virt.hardware [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1183.460877] env[65503]: DEBUG nova.virt.hardware [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1183.461070] env[65503]: DEBUG nova.virt.hardware [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1183.461338] env[65503]: DEBUG nova.virt.hardware [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1183.461514] env[65503]: DEBUG nova.virt.hardware [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1183.461708] env[65503]: DEBUG nova.virt.hardware [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1183.461867] env[65503]: DEBUG nova.virt.hardware [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1183.462040] env[65503]: DEBUG nova.virt.hardware [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1183.462900] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc03e94-c061-459b-bde3-2eef91e76180 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.466248] env[65503]: DEBUG nova.compute.manager [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1183.466440] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1183.467267] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88aefc5-4c68-4fb9-b8d0-51866af4070a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.476493] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848d2d2a-3a85-463b-bf17-92f16a33660b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.484672] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1183.484931] env[65503]: DEBUG oslo_vmware.api [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450891, 'name': PowerOnVM_Task, 'duration_secs': 0.723443} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.485452] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87766872-d834-403d-8397-7ec20c06fa9f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.487108] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1183.487353] env[65503]: INFO nova.compute.manager [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Took 7.80 seconds to spawn the instance on the hypervisor. 
[ 1183.487583] env[65503]: DEBUG nova.compute.manager [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1183.496252] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3b89a4-4e4a-4a91-84da-7e9a4d8e8423 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.499436] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:ca:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a7ba8d0-0208-4af7-af44-2a5ad382f9be', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1183.507056] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1183.507655] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1183.508180] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6efff02f-bee9-45dd-817e-854b0cf2388d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.524547] env[65503]: DEBUG oslo_vmware.api [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1183.524547] env[65503]: value = "task-4450892" [ 1183.524547] env[65503]: _type = "Task" [ 1183.524547] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.532569] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1183.532569] env[65503]: value = "task-4450893" [ 1183.532569] env[65503]: _type = "Task" [ 1183.532569] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.538775] env[65503]: DEBUG oslo_vmware.api [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450892, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.544590] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450893, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.043612] env[65503]: INFO nova.compute.manager [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Took 13.60 seconds to build instance. [ 1184.049664] env[65503]: DEBUG oslo_vmware.api [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450892, 'name': PowerOffVM_Task, 'duration_secs': 0.199353} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.054191] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1184.054427] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1184.054669] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450893, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.054940] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aab191fd-2b05-4bf9-bc12-6aa02d02919f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.151636] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1184.151861] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1184.152054] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Deleting the datastore file [datastore2] 6f9a7c04-78dd-46b2-a42d-6ff218478f19 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1184.152344] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bba9261-586f-456e-b776-f894c9a25dd7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.159920] env[65503]: DEBUG oslo_vmware.api [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting 
for the task: (returnval){ [ 1184.159920] env[65503]: value = "task-4450895" [ 1184.159920] env[65503]: _type = "Task" [ 1184.159920] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.168891] env[65503]: DEBUG oslo_vmware.api [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450895, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.546018] env[65503]: DEBUG oslo_concurrency.lockutils [None req-877462e2-b28d-4b70-ad70-00a487628bd5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.118s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.550264] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450893, 'name': CreateVM_Task, 'duration_secs': 0.619634} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.550436] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1184.550927] env[65503]: WARNING neutronclient.v2_0.client [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 1184.551311] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.551459] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.551762] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1184.552032] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72a7a33b-1d38-4b7a-948d-19dbc1867003 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.557934] env[65503]: DEBUG oslo_vmware.api [None 
req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1184.557934] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f15352-dcb2-712b-c585-0599fb6f6417" [ 1184.557934] env[65503]: _type = "Task" [ 1184.557934] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.567234] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f15352-dcb2-712b-c585-0599fb6f6417, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.670221] env[65503]: DEBUG oslo_vmware.api [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450895, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.453353} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.671094] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1184.671094] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1184.671094] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1184.671094] env[65503]: INFO nova.compute.manager [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1184.671274] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1184.671715] env[65503]: DEBUG nova.compute.manager [-] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1184.671715] env[65503]: DEBUG nova.network.neutron [-] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1184.671870] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1184.672626] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1184.672694] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1184.741780] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1184.981119] env[65503]: DEBUG nova.compute.manager [req-de1cbd67-aa58-4616-aaba-a69888d929e9 req-d3021ecc-51bb-4022-9d19-32fbd5e03254 service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Received event network-vif-deleted-e8d6afe7-1d1d-402a-a7f1-86cd2d14c559 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1184.981315] env[65503]: INFO nova.compute.manager [req-de1cbd67-aa58-4616-aaba-a69888d929e9 req-d3021ecc-51bb-4022-9d19-32fbd5e03254 service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Neutron deleted interface e8d6afe7-1d1d-402a-a7f1-86cd2d14c559; detaching it from the instance and deleting it from the info cache [ 1184.981469] env[65503]: DEBUG nova.network.neutron [req-de1cbd67-aa58-4616-aaba-a69888d929e9 req-d3021ecc-51bb-4022-9d19-32fbd5e03254 service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1185.069791] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f15352-dcb2-712b-c585-0599fb6f6417, 'name': SearchDatastore_Task, 'duration_secs': 0.018664} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.070115] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.070341] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1185.070569] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.070699] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.070870] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1185.071167] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-656cc632-5edd-40b2-9c6d-d68f3f3461c5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.074834] env[65503]: DEBUG nova.compute.manager [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Received event network-changed-f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1185.074955] env[65503]: DEBUG nova.compute.manager [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Refreshing instance network info cache due to event network-changed-f896d088-0ab2-44cc-a26c-6593c073467c. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1185.075153] env[65503]: DEBUG oslo_concurrency.lockutils [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] Acquiring lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.075287] env[65503]: DEBUG oslo_concurrency.lockutils [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] Acquired lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.075441] env[65503]: DEBUG nova.network.neutron [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Refreshing network info cache for port f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1185.085524] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1185.085697] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1185.086560] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5132672a-6947-4393-b73c-83813331139e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.092926] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1185.092926] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529418c5-22dc-38b8-5a2b-bdfd697b55f6" [ 1185.092926] env[65503]: _type = "Task" [ 1185.092926] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.102418] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529418c5-22dc-38b8-5a2b-bdfd697b55f6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.461233] env[65503]: DEBUG nova.network.neutron [-] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1185.485060] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59bdc22b-6284-447e-a01b-546d143dd83b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.495900] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e485164-796e-4d0b-b4ec-8896fd6f1d41 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.528762] env[65503]: DEBUG nova.compute.manager [req-de1cbd67-aa58-4616-aaba-a69888d929e9 req-d3021ecc-51bb-4022-9d19-32fbd5e03254 service nova] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Detach interface failed, port_id=e8d6afe7-1d1d-402a-a7f1-86cd2d14c559, reason: Instance 6f9a7c04-78dd-46b2-a42d-6ff218478f19 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1185.578431] env[65503]: WARNING neutronclient.v2_0.client [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1185.579178] env[65503]: WARNING openstack [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1185.579535] env[65503]: WARNING openstack [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1185.603913] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529418c5-22dc-38b8-5a2b-bdfd697b55f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009892} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.604783] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-553e9d23-b166-4450-98cb-44cd454d7675 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.611059] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1185.611059] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ae7fc-a5e0-6347-f6a4-aa4860feda69" [ 1185.611059] env[65503]: _type = "Task" [ 1185.611059] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.619176] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ae7fc-a5e0-6347-f6a4-aa4860feda69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.820461] env[65503]: WARNING openstack [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1185.820934] env[65503]: WARNING openstack [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1185.883562] env[65503]: WARNING neutronclient.v2_0.client [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1185.884357] env[65503]: WARNING openstack [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1185.884795] env[65503]: WARNING openstack [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1185.891106] env[65503]: WARNING openstack [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1185.891488] env[65503]: WARNING openstack [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1185.967522] env[65503]: INFO nova.compute.manager [-] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Took 1.30 seconds to deallocate network for instance. [ 1186.045303] env[65503]: DEBUG nova.network.neutron [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updated VIF entry in instance network info cache for port f896d088-0ab2-44cc-a26c-6593c073467c. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1186.045692] env[65503]: DEBUG nova.network.neutron [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updating instance_info_cache with network_info: [{"id": "f896d088-0ab2-44cc-a26c-6593c073467c", "address": "fa:16:3e:79:ba:09", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf896d088-0a", "ovs_interfaceid": "f896d088-0ab2-44cc-a26c-6593c073467c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1186.121841] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526ae7fc-a5e0-6347-f6a4-aa4860feda69, 'name': SearchDatastore_Task, 'duration_secs': 0.009983} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.122052] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.122319] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad/afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1186.122582] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-772d5115-ff2e-44c8-9687-a6627fcad70a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.130310] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1186.130310] env[65503]: value = "task-4450896" [ 1186.130310] env[65503]: _type = "Task" [ 1186.130310] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.139505] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450896, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.473916] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.474282] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.474522] env[65503]: DEBUG nova.objects.instance [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lazy-loading 'resources' on Instance uuid 6f9a7c04-78dd-46b2-a42d-6ff218478f19 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1186.549566] env[65503]: DEBUG oslo_concurrency.lockutils [req-384d60c6-8074-474d-8e34-6d0d7fc0befc req-95a3742d-e4b7-4bea-b9bc-386ed8ce7509 service nova] Releasing lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.642151] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450896, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509593} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.642555] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad/afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1186.642648] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1186.642904] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b674996-9192-4eda-8850-be1dc00859e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.651368] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1186.651368] env[65503]: value = "task-4450897" [ 1186.651368] env[65503]: _type = "Task" [ 1186.651368] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.662371] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450897, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.072193] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a96fc63-ed32-4406-a687-ece5cd90d48f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.080318] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84901ce6-e7f1-43fb-bd8d-4432e3b1e6ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.112033] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d00e5ca-cf7e-46db-89f6-aa5b5da6cdfc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.120397] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee04a7f8-a79e-4835-baac-afe0a3a56788 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.136039] env[65503]: DEBUG nova.compute.provider_tree [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.161394] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450897, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067404} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.161667] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1187.162472] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d9c9a0-f325-437b-9912-880232ef3270 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.183938] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad/afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1187.184489] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6097f0e9-2634-45d4-92e0-894aeeef27a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.204247] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1187.204247] env[65503]: value = "task-4450898" [ 1187.204247] env[65503]: _type = "Task" [ 1187.204247] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.212363] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450898, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.638887] env[65503]: DEBUG nova.scheduler.client.report [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1187.715344] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450898, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.145020] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.670s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.168138] env[65503]: INFO nova.scheduler.client.report [None req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Deleted allocations for instance 6f9a7c04-78dd-46b2-a42d-6ff218478f19 [ 1188.215838] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.275653] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.275926] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.276175] env[65503]: DEBUG nova.compute.manager [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1188.277086] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697e282d-20fa-459a-9b20-12496bd7fb91 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.284442] env[65503]: DEBUG nova.compute.manager [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 1188.285038] env[65503]: DEBUG nova.objects.instance [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'flavor' on Instance uuid 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1188.676348] env[65503]: DEBUG oslo_concurrency.lockutils [None 
req-7c70d380-16ea-4fb6-8fe4-e523c406a4b2 tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "6f9a7c04-78dd-46b2-a42d-6ff218478f19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.719s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.717090] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450898, 'name': ReconfigVM_Task, 'duration_secs': 1.215163} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.717456] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfigured VM instance instance-0000006a to attach disk [datastore1] afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad/afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1188.719401] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'guest_format': None, 'boot_index': 0, 'device_type': 'disk', 'encrypted': False, 'encryption_options': None, 'encryption_secret_uuid': None, 'device_name': '/dev/sda', 'disk_bus': None, 'size': 0, 'encryption_format': None, 'image_id': 'd68ffece-ab91-4610-b535-fa1fb25ade93'}], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'boot_index': None, 'mount_device': '/dev/sdb', 'device_type': None, 'attachment_id': 'c3af7079-8ca4-4b10-81fd-5fa8e9193454', 'delete_on_termination': False, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870498', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'name': 'volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad', 'attached_at': '', 'detached_at': '', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'serial': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe'}, 'volume_type': None}], 'swap': None} {{(pid=65503) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1188.719707] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1188.719909] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870498', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'name': 'volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad', 'attached_at': '', 'detached_at': '', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'serial': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1188.720988] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331e7351-1770-4cc9-a82c-1eb595320e25 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.738058] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1e7732-889c-4c33-ad53-8d8acd75cb80 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.763131] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe/volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.763454] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efe6754a-7bc7-45ea-bda9-f606f2f772f1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.782676] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1188.782676] env[65503]: value = "task-4450899" [ 1188.782676] env[65503]: _type = "Task" [ 1188.782676] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.792583] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450899, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.293580] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450899, 'name': ReconfigVM_Task, 'duration_secs': 0.291525} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.293876] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfigured VM instance instance-0000006a to attach disk [datastore1] volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe/volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.298788] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1189.299024] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d660832-c7ed-457b-adc9-c616446f6df3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.309101] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75a32288-18e4-4a66-9fad-96350605b8ab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.317678] env[65503]: DEBUG oslo_vmware.api [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1189.317678] env[65503]: value = "task-4450901" [ 1189.317678] env[65503]: _type = "Task" [ 1189.317678] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.319161] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1189.319161] env[65503]: value = "task-4450900" [ 1189.319161] env[65503]: _type = "Task" [ 1189.319161] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.334700] env[65503]: DEBUG oslo_vmware.api [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450901, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.339479] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450900, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.774655] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "bc0dad37-fc1d-4edc-8680-dba294dd724e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.775038] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "bc0dad37-fc1d-4edc-8680-dba294dd724e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.775192] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "bc0dad37-fc1d-4edc-8680-dba294dd724e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.775376] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "bc0dad37-fc1d-4edc-8680-dba294dd724e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.775539] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "bc0dad37-fc1d-4edc-8680-dba294dd724e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.777840] env[65503]: INFO nova.compute.manager [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Terminating instance [ 1189.828962] env[65503]: DEBUG oslo_vmware.api [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450901, 'name': PowerOffVM_Task, 'duration_secs': 0.19235} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.832223] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1189.833020] env[65503]: DEBUG nova.compute.manager [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1189.833208] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414f5fb3-d805-4dde-877a-9557e88fcde3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.841469] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450900, 'name': ReconfigVM_Task, 'duration_secs': 0.167036} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.843065] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870498', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'name': 'volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad', 'attached_at': '', 'detached_at': '', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'serial': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1189.845661] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5315379b-6df1-4183-b36a-60eed5e948e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.853588] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1189.853588] env[65503]: value = "task-4450902" [ 1189.853588] env[65503]: _type = "Task" [ 1189.853588] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.862702] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450902, 'name': Rename_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.282062] env[65503]: DEBUG nova.compute.manager [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1190.282381] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1190.283321] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4480105b-37a9-4b3e-b3ff-bc846f1bcbf3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.291890] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1190.292145] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f6b6539-dfb5-4f7b-94a4-04308245483a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.298737] env[65503]: DEBUG oslo_vmware.api [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1190.298737] env[65503]: value = "task-4450903" [ 1190.298737] env[65503]: _type = "Task" [ 1190.298737] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.307017] env[65503]: DEBUG oslo_vmware.api [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450903, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.351404] env[65503]: DEBUG oslo_concurrency.lockutils [None req-beddd96c-dff5-408e-b2b5-7669db481d1e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.075s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.364344] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450902, 'name': Rename_Task, 'duration_secs': 0.19398} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.364658] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1190.365020] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5dff7741-9e82-47c3-901b-fe27c91f4699 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.374745] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1190.374745] env[65503]: value = "task-4450904" [ 1190.374745] env[65503]: _type = "Task" [ 1190.374745] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.385056] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450904, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.465084] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.465305] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._sync_power_states {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.667370] env[65503]: DEBUG nova.objects.instance [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'flavor' on Instance uuid 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.811114] env[65503]: DEBUG oslo_vmware.api [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450903, 'name': PowerOffVM_Task, 'duration_secs': 0.249337} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.811430] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1190.811529] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1190.811812] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e62433f0-9089-442d-b71c-73a37cc6e673 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.886373] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450904, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.888024] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1190.888354] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1190.888634] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Deleting the datastore file [datastore2] bc0dad37-fc1d-4edc-8680-dba294dd724e {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1190.889012] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54457e7f-d6c7-4a52-b4e3-d394236abbb4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.897163] env[65503]: DEBUG oslo_vmware.api [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for the task: (returnval){ [ 1190.897163] env[65503]: value = "task-4450906" [ 1190.897163] env[65503]: _type = "Task" [ 1190.897163] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.905668] env[65503]: DEBUG oslo_vmware.api [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450906, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.969351] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Getting list of instances from cluster (obj){ [ 1190.969351] env[65503]: value = "domain-c8" [ 1190.969351] env[65503]: _type = "ClusterComputeResource" [ 1190.969351] env[65503]: } {{(pid=65503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1190.970460] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8ef083-38d9-46c7-960f-7549a63db8b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.986254] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Got total of 5 instances {{(pid=65503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1190.986412] env[65503]: WARNING nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] While synchronizing instance power states, found 6 instances in the database and 5 instances on the hypervisor. [ 1190.986519] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Triggering sync for uuid d3ca90c9-3dfa-47a5-b48b-67a45ea26021 {{(pid=65503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1190.986820] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Triggering sync for uuid afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad {{(pid=65503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1190.987036] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Triggering sync for uuid bc0dad37-fc1d-4edc-8680-dba294dd724e {{(pid=65503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1190.987289] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Triggering sync for uuid 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 {{(pid=65503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1190.987488] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Triggering sync for uuid fb2dddac-4ac0-498a-b972-e61255833ad0 {{(pid=65503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1190.987724] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Triggering sync for uuid 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 {{(pid=65503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1190.988329] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.988329] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.988607] env[65503]: DEBUG 
oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.988785] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.988936] env[65503]: INFO nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] During sync_power_state the instance has a pending task (rebuild_spawning). Skip. [ 1190.989108] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.989352] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "bc0dad37-fc1d-4edc-8680-dba294dd724e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.989564] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.989730] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.989878] env[65503]: INFO nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] During sync_power_state the instance has a pending task (powering-on). Skip. 
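The _sync_power_states pass above compares what the database believes against what the hypervisor reports (6 instances in the database vs. 5 on the cluster), takes a per-UUID lock, and skips any instance that still has a task in flight ("pending task (rebuild_spawning)", "pending task (powering-on)"). A minimal sketch of that reconciliation loop follows; it is an illustration modeled on these log messages, not Nova's actual implementation. The records reuse UUIDs and task states visible above where shown and are otherwise placeholders.

    import logging

    logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s")
    LOG = logging.getLogger("sync_power_states_sketch")

    # Placeholder rows shaped like the fields visible in the log: uuid, task_state,
    # and the last power_state recorded in the database.
    db_instances = [
        {"uuid": "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7", "task_state": "powering-on", "power_state": 1},
        {"uuid": "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad", "task_state": "rebuild_spawning", "power_state": 1},
        {"uuid": "d3ca90c9-3dfa-47a5-b48b-67a45ea26021", "task_state": None, "power_state": 1},
    ]
    # UUIDs the virt driver actually found on the hypervisor (one DB row is missing here,
    # which is what triggers the count-mismatch warning).
    driver_uuids = {"0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7",
                    "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad"}

    def sync_power_states(db_instances, driver_uuids):
        if len(db_instances) != len(driver_uuids):
            LOG.warning("While synchronizing instance power states, found %d instances "
                        "in the database and %d instances on the hypervisor.",
                        len(db_instances), len(driver_uuids))
        for inst in db_instances:
            if inst["task_state"] is not None:
                # Mirrors the "During sync_power_state the instance has a pending task (...). Skip."
                # messages above: never fight an operation that is already in progress.
                LOG.info("[instance: %s] During sync_power_state the instance has a "
                         "pending task (%s). Skip.", inst["uuid"], inst["task_state"])
                continue
            vm_power_state = 1 if inst["uuid"] in driver_uuids else 0
            if vm_power_state != inst["power_state"]:
                LOG.info("[instance: %s] power state out of sync: db=%s vm=%s",
                         inst["uuid"], inst["power_state"], vm_power_state)

    sync_power_states(db_instances, driver_uuids)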
[ 1190.990040] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.990279] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "fb2dddac-4ac0-498a-b972-e61255833ad0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.990450] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.990734] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.990917] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.991832] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a3b45b-f372-46fd-83c0-106469f663c2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.995102] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5cac405-f987-4d4e-82f3-a90ccd568c91 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.998080] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa099a60-9e4d-42d4-9b3c-de4da34cf2ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.176427] env[65503]: DEBUG oslo_concurrency.lockutils [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.176696] env[65503]: DEBUG oslo_concurrency.lockutils [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.176955] env[65503]: DEBUG 
nova.network.neutron [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1191.177262] env[65503]: DEBUG nova.objects.instance [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'info_cache' on Instance uuid 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.386273] env[65503]: DEBUG oslo_vmware.api [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450904, 'name': PowerOnVM_Task, 'duration_secs': 0.550224} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.386515] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1191.386720] env[65503]: DEBUG nova.compute.manager [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1191.387693] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588163ca-dfe7-439b-a071-2715be489c09 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.406418] env[65503]: DEBUG oslo_vmware.api [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Task: {'id': task-4450906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254831} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.406651] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1191.406819] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1191.406986] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1191.407167] env[65503]: INFO nova.compute.manager [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1191.407395] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1191.407657] env[65503]: DEBUG nova.compute.manager [-] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1191.407761] env[65503]: DEBUG nova.network.neutron [-] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1191.407992] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1191.408522] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1191.408779] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1191.444581] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
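Most vCenter calls in this stretch (ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task) follow the same shape: submit a task, then poll it until it reports "completed successfully" together with a duration_secs. The loop below is a simplified, self-contained model of that polling behaviour as it appears in the _poll_task lines; it does not use the real oslo.vmware API, and FakeTask is a stand-in for a vCenter task handle.

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle; real tasks come back from the SOAP API."""
        def __init__(self, task_id, name, steps):
            self.id = task_id
            self.name = name
            self._progress = iter(steps)   # e.g. [5, 100]: progress values per poll

        def poll(self):
            """Return (state, progress) the way a task-info query would."""
            progress = next(self._progress)
            state = "success" if progress >= 100 else "running"
            return state, progress

    def wait_for_task(task, interval=0.5):
        """Poll until the task finishes, printing progress the way _poll_task logs it."""
        start = time.monotonic()
        while True:
            state, progress = task.poll()
            if state == "running":
                print("Task: {'id': %s, 'name': %s} progress is %d%%."
                      % (task.id, task.name, progress))
                time.sleep(interval)
                continue
            duration = time.monotonic() - start
            print("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} completed successfully."
                  % (task.id, task.name, duration))
            return

    # Example run using an id copied from the log; the progress steps are invented.
    wait_for_task(FakeTask("task-4450899", "ReconfigVM_Task", [5, 100]), interval=0.1)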
[ 1191.513931] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.523s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.514431] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.524s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.514761] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.526s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.667223] env[65503]: DEBUG nova.compute.manager [req-9afa7e0c-b3fe-4c13-be79-77f0be1c6ccf req-70b5cf1c-e503-4f4d-b455-6242becc6ed8 service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Received event network-vif-deleted-f222ff54-daca-43ba-8e76-24669d7878e6 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1191.668020] env[65503]: INFO nova.compute.manager [req-9afa7e0c-b3fe-4c13-be79-77f0be1c6ccf req-70b5cf1c-e503-4f4d-b455-6242becc6ed8 service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Neutron deleted interface f222ff54-daca-43ba-8e76-24669d7878e6; detaching it from the instance and deleting it from the info cache [ 1191.668020] env[65503]: DEBUG nova.network.neutron [req-9afa7e0c-b3fe-4c13-be79-77f0be1c6ccf req-70b5cf1c-e503-4f4d-b455-6242becc6ed8 service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1191.681112] env[65503]: DEBUG nova.objects.base [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Object Instance<0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7> lazy-loaded attributes: flavor,info_cache {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1191.850077] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.850374] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.905910] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.905910] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.905910] env[65503]: DEBUG nova.objects.instance [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1192.149951] env[65503]: DEBUG nova.network.neutron [-] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1192.170746] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88c611ce-3909-469d-b70f-be99e21c2d82 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.181850] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7756f2c0-a84e-46cd-a5e1-84e909acdf95 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.192627] env[65503]: WARNING neutronclient.v2_0.client [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1192.193333] env[65503]: WARNING openstack [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1192.193696] env[65503]: WARNING openstack [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1192.220894] env[65503]: DEBUG nova.compute.manager [req-9afa7e0c-b3fe-4c13-be79-77f0be1c6ccf req-70b5cf1c-e503-4f4d-b455-6242becc6ed8 service nova] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Detach interface failed, port_id=f222ff54-daca-43ba-8e76-24669d7878e6, reason: Instance bc0dad37-fc1d-4edc-8680-dba294dd724e could not be found. 
{{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1192.308573] env[65503]: WARNING openstack [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1192.309013] env[65503]: WARNING openstack [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1192.323762] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.323930] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1192.370154] env[65503]: WARNING neutronclient.v2_0.client [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
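The lockutils lines throughout this section always come in the same trio: 'Acquiring lock ... by ...', 'acquired ... waited N s', and '"released" ... held N s' (for example the compute_resources lock taken for ResourceTracker.finish_evacuation above). The context manager below reproduces that accounting with a plain threading.Lock; it is only an illustration of the logged pattern, not the oslo.concurrency implementation.

    import contextlib
    import threading
    import time

    _locks = {}                         # one named in-process lock per key
    _registry_guard = threading.Lock()  # protects the registry itself

    @contextlib.contextmanager
    def timed_lock(name, caller):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        print('Acquiring lock "%s" by "%s"' % (name, caller))
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, caller, waited))
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, caller, held))

    # Example: the sleep stands in for whatever work is done while the lock is held.
    with timed_lock("compute_resources", "ResourceTracker.update_usage"):
        time.sleep(0.01)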
[ 1192.370154] env[65503]: WARNING openstack [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1192.370154] env[65503]: WARNING openstack [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1192.494908] env[65503]: DEBUG nova.network.neutron [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance_info_cache with network_info: [{"id": "038e2362-a15e-4f40-bbd0-5289c014118b", "address": "fa:16:3e:12:d8:4b", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038e2362-a1", "ovs_interfaceid": "038e2362-a15e-4f40-bbd0-5289c014118b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1192.652325] env[65503]: INFO nova.compute.manager [-] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Took 1.24 seconds to deallocate network for instance. 
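The instance_info_cache update just above carries the full network_info for port 038e2362-a15e-4f40-bbd0-5289c014118b: a list of VIFs, each with its network, subnets, fixed IPs and any attached floating IPs. The snippet below walks a trimmed copy of that structure to pull the addresses out; the literal is abbreviated from the log entry above and is only for illustration.

    # Trimmed from the instance_info_cache entry logged above for instance 0b5ee1d9-....
    network_info = [{
        "id": "038e2362-a15e-4f40-bbd0-5289c014118b",
        "address": "fa:16:3e:12:d8:4b",
        "type": "ovs",
        "devname": "tap038e2362-a1",
        "network": {
            "label": "tempest-ServerActionsTestJSON-302445083-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.7",
                    "type": "fixed",
                    "floating_ips": [{"address": "10.180.180.143", "type": "floating"}],
                }],
            }],
        },
    }]

    for vif in network_info:
        print("VIF %s (%s) on %s" % (vif["id"], vif["address"], vif["network"]["label"]))
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floats = [f["address"] for f in ip.get("floating_ips", [])]
                print("  fixed %s in %s, floating: %s"
                      % (ip["address"], subnet["cidr"], ", ".join(floats) or "none"))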
[ 1192.914282] env[65503]: DEBUG oslo_concurrency.lockutils [None req-51f0a410-7b3b-471e-9437-c1d6685a1e7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.997514] env[65503]: DEBUG oslo_concurrency.lockutils [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.158976] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.159287] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.159516] env[65503]: DEBUG nova.objects.instance [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lazy-loading 'resources' on Instance uuid bc0dad37-fc1d-4edc-8680-dba294dd724e {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.755623] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861705bb-f3b2-44d9-9973-a63ec49652a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.764328] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b6707f-9486-4655-9612-2e9f2fbaf3a3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.796651] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9656e6-a627-4d3a-80c6-be749a8f054b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.805049] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30797352-2ccf-44e5-9ef4-7c1886756e79 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.819130] env[65503]: DEBUG nova.compute.provider_tree [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.003679] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1194.004065] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9732d3c8-96f9-47ee-b65f-4bd9e06c4fb3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.015701] env[65503]: DEBUG oslo_vmware.api [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1194.015701] env[65503]: value = "task-4450907" [ 1194.015701] env[65503]: _type = "Task" [ 1194.015701] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.024860] env[65503]: DEBUG oslo_vmware.api [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450907, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.323795] env[65503]: DEBUG nova.scheduler.client.report [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1194.526706] env[65503]: DEBUG oslo_vmware.api [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450907, 'name': PowerOnVM_Task, 'duration_secs': 0.405258} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.526995] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1194.527212] env[65503]: DEBUG nova.compute.manager [None req-79057aef-b412-420e-bb2c-5046bf7ba63b tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1194.528054] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3aec3f-8a5e-41aa-aef5-6d0aea8df78e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.831311] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.851958] env[65503]: INFO nova.scheduler.client.report [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Deleted allocations for instance bc0dad37-fc1d-4edc-8680-dba294dd724e [ 1195.324198] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.360529] env[65503]: DEBUG oslo_concurrency.lockutils [None req-e7e87835-0dca-49f9-b6b1-c8711690a52a tempest-ServerRescueTestJSON-1124687769 tempest-ServerRescueTestJSON-1124687769-project-member] Lock "bc0dad37-fc1d-4edc-8680-dba294dd724e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.585s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.361431] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "bc0dad37-fc1d-4edc-8680-dba294dd724e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.372s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.361618] env[65503]: INFO nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] During sync_power_state the instance has a pending task (deleting). Skip. 
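The report client above decides the inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 has not changed: 48 VCPU at allocation_ratio 4.0, 196590 MB of RAM with 512 MB reserved, and 200 GB of disk. The usable capacity placement schedules against is commonly derived as (total - reserved) * allocation_ratio per resource class; the few lines below apply that to the inventory dict copied from the log. The formula is the usual placement convention, stated here as an assumption rather than something taken from this log.

    # Inventory as reported in the log for provider 988ff85a-1d12-41bb-a369-e298e8491ca1.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Usable capacity = (total - reserved) scaled by the overcommit ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print("%s: capacity %.0f (total=%s reserved=%s ratio=%s)"
              % (rc, capacity, inv['total'], inv['reserved'], inv['allocation_ratio']))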
[ 1195.361778] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "bc0dad37-fc1d-4edc-8680-dba294dd724e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.829548] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.829766] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.829937] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.830109] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1195.831034] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c6253c-4c32-46d6-a990-7e6cd4d8a145 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.839906] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa01f09e-20c6-4d7c-94ba-2063abc380e2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.855690] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755f468e-2e1f-44b6-8563-823f8956d784 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.867081] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf6468c-54c3-4729-b135-2173d164f50f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.901121] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179109MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1195.901285] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.901547] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.933059] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance d3ca90c9-3dfa-47a5-b48b-67a45ea26021 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1196.933059] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1196.933059] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1196.933059] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance fb2dddac-4ac0-498a-b972-e61255833ad0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1196.933059] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1196.933059] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1196.933059] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=100GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '5', 'num_vm_active': '5', 'num_task_None': '5', 'num_os_type_None': '5', 'num_proj_521d40776571452e85178972f97c8622': '2', 'io_workload': '0', 'num_proj_93906c603f7a4b18a34fc4b42fb6d6c1': '1', 'num_proj_592efb180976432cbcecb9ad421e1bd1': '1', 'num_proj_3658921b747e4d78a2046b838cb36d26': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1197.008062] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b2945b-cda4-486f-866c-c4072be4f5c2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.017395] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd661b1-b342-459c-aefa-88b1a6b1acaf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.050077] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f84ef4-134d-40f2-8eb5-0cb68c89cbab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.058540] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbfc1aa-b74f-4ef6-89a9-1c9c1aa5b876 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.072502] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1197.579735] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1198.086041] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1198.086428] env[65503]: DEBUG oslo_concurrency.lockutils [None 
req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.185s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.591753] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquiring lock "8d4ff5ca-12c6-488a-9a23-495d24e012e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.592017] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "8d4ff5ca-12c6-488a-9a23-495d24e012e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.086218] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.086588] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.086633] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.094269] env[65503]: DEBUG nova.compute.manager [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1199.619055] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.619362] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.620876] env[65503]: INFO nova.compute.claims [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1200.714330] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b978620c-55c5-4e32-92db-85630b31a0ee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.723546] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ca3146-7412-462c-8bee-8ec8a74c59e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.754436] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ee6e34-d495-4d38-a7df-f910d8005228 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.762870] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72b9e04-bb3c-4757-b4fd-d5c81ecddff2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.776978] env[65503]: DEBUG nova.compute.provider_tree [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.280114] env[65503]: DEBUG nova.scheduler.client.report [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1201.785214] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d 
tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.166s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.785761] env[65503]: DEBUG nova.compute.manager [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1202.291492] env[65503]: DEBUG nova.compute.utils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1202.292944] env[65503]: DEBUG nova.compute.manager [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Not allocating networking since 'none' was specified. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 1202.794499] env[65503]: DEBUG nova.compute.manager [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1203.804994] env[65503]: DEBUG nova.compute.manager [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1203.832380] env[65503]: DEBUG nova.virt.hardware [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1203.832631] env[65503]: DEBUG nova.virt.hardware [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1203.832782] env[65503]: DEBUG nova.virt.hardware [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1203.832961] env[65503]: DEBUG nova.virt.hardware [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1203.833118] env[65503]: DEBUG nova.virt.hardware [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1203.833259] env[65503]: DEBUG nova.virt.hardware [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1203.833459] env[65503]: DEBUG nova.virt.hardware [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1203.833611] env[65503]: DEBUG nova.virt.hardware [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1203.833842] env[65503]: DEBUG nova.virt.hardware [None req-7f78f2de-8470-450b-a00e-dc468f89590d 
tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1203.834046] env[65503]: DEBUG nova.virt.hardware [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1203.834250] env[65503]: DEBUG nova.virt.hardware [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1203.835133] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410741c8-155c-4d50-9816-1192a92aae6a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.844007] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b166972a-d3cc-45b6-bd71-b1536423b324 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.858229] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1203.863678] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Creating folder: Project (09d240eb65604cabaa35ee00fafdb42c). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1203.863967] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af263ce0-e11f-4e0b-83e6-80691b95143f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.875693] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Created folder: Project (09d240eb65604cabaa35ee00fafdb42c) in parent group-v870190. [ 1203.875870] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Creating folder: Instances. Parent ref: group-v870502. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1203.876160] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb271bd8-9ebe-405a-92f4-98e6137b9dfe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.884865] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Created folder: Instances in parent group-v870502. 
[ 1203.885104] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1203.885301] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1203.885507] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2349071-07e6-4d34-8ad0-26bfd151deec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.902317] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1203.902317] env[65503]: value = "task-4450910" [ 1203.902317] env[65503]: _type = "Task" [ 1203.902317] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.909816] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450910, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.413696] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450910, 'name': CreateVM_Task, 'duration_secs': 0.275212} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.413892] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1204.414445] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.414586] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.414932] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1204.415294] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7746978-c02d-41a7-8221-9cd8c4a88200 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.420733] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 
tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1204.420733] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52667964-6239-9068-a5fd-e50ef3f55c98" [ 1204.420733] env[65503]: _type = "Task" [ 1204.420733] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.429425] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52667964-6239-9068-a5fd-e50ef3f55c98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.931929] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52667964-6239-9068-a5fd-e50ef3f55c98, 'name': SearchDatastore_Task, 'duration_secs': 0.010387} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.932317] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.932574] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1204.932767] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.933022] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.933136] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1204.933348] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04f4c5d1-8e39-41a3-84b8-a5f317acc424 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1204.943086] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1204.943277] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1204.944023] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cd8947b-ec7e-47b3-8dc4-e4b22f3ddc18 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.949953] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1204.949953] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e57d0d-eab1-aa48-234c-428f756a8664" [ 1204.949953] env[65503]: _type = "Task" [ 1204.949953] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.959054] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e57d0d-eab1-aa48-234c-428f756a8664, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.461490] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e57d0d-eab1-aa48-234c-428f756a8664, 'name': SearchDatastore_Task, 'duration_secs': 0.009711} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.462302] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbfa5655-c962-4e63-94ad-845f8659dd76 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.468158] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1205.468158] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ec3245-331a-f1e2-96f4-57cefb19698e" [ 1205.468158] env[65503]: _type = "Task" [ 1205.468158] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.476466] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ec3245-331a-f1e2-96f4-57cefb19698e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.981783] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ec3245-331a-f1e2-96f4-57cefb19698e, 'name': SearchDatastore_Task, 'duration_secs': 0.009912} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.982170] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.982527] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 8d4ff5ca-12c6-488a-9a23-495d24e012e7/8d4ff5ca-12c6-488a-9a23-495d24e012e7.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1205.982874] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d47d202-3d2a-42bd-a19c-012a9f4d6dcf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.993298] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1205.993298] env[65503]: value = "task-4450911" [ 1205.993298] env[65503]: _type = "Task" [ 1205.993298] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.004603] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450911, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.505810] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450911, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495718} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.505810] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 8d4ff5ca-12c6-488a-9a23-495d24e012e7/8d4ff5ca-12c6-488a-9a23-495d24e012e7.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1206.505810] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1206.506098] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19346333-270a-4eb3-aeac-06cd75bcd0b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.512870] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1206.512870] env[65503]: value = "task-4450912" [ 1206.512870] env[65503]: _type = "Task" [ 1206.512870] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.522372] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450912, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.023935] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450912, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.204584} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.024354] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1207.025143] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522f3b98-8395-4c81-8a36-77b48370af66 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.045530] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 8d4ff5ca-12c6-488a-9a23-495d24e012e7/8d4ff5ca-12c6-488a-9a23-495d24e012e7.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1207.045844] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-225cd2fa-e0ee-4868-af56-f8de5aa5b0a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.066188] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1207.066188] env[65503]: value = "task-4450913" [ 1207.066188] env[65503]: _type = "Task" [ 1207.066188] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.075441] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450913, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.134334] env[65503]: DEBUG oslo_concurrency.lockutils [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "fb2dddac-4ac0-498a-b972-e61255833ad0" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.134635] env[65503]: DEBUG oslo_concurrency.lockutils [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.134854] env[65503]: INFO nova.compute.manager [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Shelving [ 1207.576679] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450913, 'name': ReconfigVM_Task, 'duration_secs': 0.261617} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.577033] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 8d4ff5ca-12c6-488a-9a23-495d24e012e7/8d4ff5ca-12c6-488a-9a23-495d24e012e7.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1207.577753] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25830717-e1a5-4353-8b0f-4d78296fe5e3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.585663] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1207.585663] env[65503]: value = "task-4450914" [ 1207.585663] env[65503]: _type = "Task" [ 1207.585663] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.594865] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450914, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.096154] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450914, 'name': Rename_Task, 'duration_secs': 0.136759} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.096596] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1208.096740] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97c95062-d80c-4272-b459-3489d76f0cd7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.104119] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1208.104119] env[65503]: value = "task-4450915" [ 1208.104119] env[65503]: _type = "Task" [ 1208.104119] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.112974] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450915, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.144175] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1208.144530] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-289ffee6-4db0-43a4-be0b-94d4bb407d1f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.152473] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1208.152473] env[65503]: value = "task-4450916" [ 1208.152473] env[65503]: _type = "Task" [ 1208.152473] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.162308] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450916, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.615321] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450915, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.663038] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450916, 'name': PowerOffVM_Task, 'duration_secs': 0.18204} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.663349] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1208.664210] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c182ff1c-959e-488e-b9e2-8b0dff27b4d2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.683487] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316a4ec3-fa61-4529-a85b-495769fc4863 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.116247] env[65503]: DEBUG oslo_vmware.api [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450915, 'name': PowerOnVM_Task, 'duration_secs': 0.743963} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.116786] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1209.116786] env[65503]: INFO nova.compute.manager [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Took 5.31 seconds to spawn the instance on the hypervisor. 
[ 1209.116937] env[65503]: DEBUG nova.compute.manager [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1209.117763] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81998115-471d-4169-bb35-9e40a0d0fcdc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.195692] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1209.196031] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-60904670-917e-4c48-8fed-16d715b05316 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.204407] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1209.204407] env[65503]: value = "task-4450917" [ 1209.204407] env[65503]: _type = "Task" [ 1209.204407] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.213561] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450917, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.636677] env[65503]: INFO nova.compute.manager [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Took 10.04 seconds to build instance. [ 1209.715059] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450917, 'name': CreateSnapshot_Task, 'duration_secs': 0.462209} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.715382] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1209.716183] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8a49f3-af7d-4c9d-8dd7-474e58f26cf6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.138782] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7f78f2de-8470-450b-a00e-dc468f89590d tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "8d4ff5ca-12c6-488a-9a23-495d24e012e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.547s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1210.235249] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1210.235600] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ae9464eb-9a70-4074-8e3a-58c4fb22ae3c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.247855] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1210.247855] env[65503]: value = "task-4450918" [ 1210.247855] env[65503]: _type = "Task" [ 1210.247855] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.256480] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450918, 'name': CloneVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.409101] env[65503]: INFO nova.compute.manager [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Rebuilding instance [ 1210.458150] env[65503]: DEBUG nova.compute.manager [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1210.459037] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b6d244-1b55-4106-9fdc-085e0421a201 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.759108] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450918, 'name': CloneVM_Task} progress is 94%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.260528] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450918, 'name': CloneVM_Task} progress is 95%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.473358] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1211.473702] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f49ff843-8d3c-43a9-812f-de627b2f1917 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.481304] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1211.481304] env[65503]: value = "task-4450919" [ 1211.481304] env[65503]: _type = "Task" [ 1211.481304] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.491483] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450919, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.760502] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450918, 'name': CloneVM_Task, 'duration_secs': 1.352008} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.760773] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Created linked-clone VM from snapshot [ 1211.761535] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684c4772-c98c-4883-8698-4ed1d6a642c4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.768826] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Uploading image 22368b12-a541-49dd-98b8-5368ea101029 {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1211.789948] env[65503]: DEBUG oslo_vmware.rw_handles [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1211.789948] env[65503]: value = "vm-870506" [ 1211.789948] env[65503]: _type = "VirtualMachine" [ 1211.789948] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1211.790254] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c6554366-b23e-4e29-a349-49ca268bd531 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.797681] env[65503]: DEBUG oslo_vmware.rw_handles [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lease: (returnval){ [ 1211.797681] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52519935-9cab-c8b6-2fc3-152d503ab9d0" [ 1211.797681] env[65503]: _type = "HttpNfcLease" [ 1211.797681] env[65503]: } obtained for exporting VM: (result){ [ 1211.797681] env[65503]: value = "vm-870506" [ 1211.797681] env[65503]: _type = "VirtualMachine" [ 1211.797681] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1211.797969] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the lease: (returnval){ [ 1211.797969] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52519935-9cab-c8b6-2fc3-152d503ab9d0" [ 1211.797969] env[65503]: _type = "HttpNfcLease" [ 1211.797969] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1211.805190] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1211.805190] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52519935-9cab-c8b6-2fc3-152d503ab9d0" [ 1211.805190] env[65503]: _type = "HttpNfcLease" [ 1211.805190] env[65503]: } is initializing. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1211.991578] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450919, 'name': PowerOffVM_Task, 'duration_secs': 0.194269} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.991855] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1211.992438] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1211.993233] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54275dd6-fa5e-46d5-8544-e44ea9884171 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.000472] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1212.001437] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c05f221-0ec0-452b-b82a-e6abf8501943 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.031931] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1212.032168] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1212.032291] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Deleting the datastore file [datastore2] 8d4ff5ca-12c6-488a-9a23-495d24e012e7 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1212.032615] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-781c27c8-2ba1-4d2f-b202-c0d0852b8a8a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.039661] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b 
tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1212.039661] env[65503]: value = "task-4450922" [ 1212.039661] env[65503]: _type = "Task" [ 1212.039661] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.049226] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450922, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.307045] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1212.307045] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52519935-9cab-c8b6-2fc3-152d503ab9d0" [ 1212.307045] env[65503]: _type = "HttpNfcLease" [ 1212.307045] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1212.307045] env[65503]: DEBUG oslo_vmware.rw_handles [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1212.307045] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52519935-9cab-c8b6-2fc3-152d503ab9d0" [ 1212.307045] env[65503]: _type = "HttpNfcLease" [ 1212.307045] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1212.307777] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7c0136-c534-4e34-aba7-74431f795032 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.315593] env[65503]: DEBUG oslo_vmware.rw_handles [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f2ad7b-eefd-df4f-e88e-c65cc8a50f0e/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1212.315768] env[65503]: DEBUG oslo_vmware.rw_handles [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f2ad7b-eefd-df4f-e88e-c65cc8a50f0e/disk-0.vmdk for reading. {{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1212.405188] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e8a6bc6c-1bbf-4b95-8773-c77bc23b3f82 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.550590] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091352} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.550803] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1212.550987] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1212.551182] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1213.589874] env[65503]: DEBUG nova.virt.hardware [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1213.590352] env[65503]: DEBUG nova.virt.hardware [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1213.590521] env[65503]: DEBUG nova.virt.hardware [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1213.590907] env[65503]: DEBUG nova.virt.hardware [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1213.591496] env[65503]: DEBUG nova.virt.hardware [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1213.591734] env[65503]: DEBUG nova.virt.hardware [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 
tempest-ServerShowV257Test-1214169527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1213.591985] env[65503]: DEBUG nova.virt.hardware [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1213.592316] env[65503]: DEBUG nova.virt.hardware [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1213.592542] env[65503]: DEBUG nova.virt.hardware [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1213.592776] env[65503]: DEBUG nova.virt.hardware [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1213.592973] env[65503]: DEBUG nova.virt.hardware [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1213.593906] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71d9b0a-0e0b-45aa-bce0-94db33575d5f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.602740] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efdd9cc2-1995-454e-88b4-5015aae60616 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.619180] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1213.625354] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1213.625741] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1213.626120] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f02ba06f-0434-41d3-a759-0887ffe5282a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.646677] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1213.646677] env[65503]: value = "task-4450923" [ 1213.646677] env[65503]: _type = "Task" [ 1213.646677] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.656892] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450923, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.157684] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450923, 'name': CreateVM_Task, 'duration_secs': 0.397978} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.157958] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1214.158588] env[65503]: DEBUG oslo_concurrency.lockutils [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.158874] env[65503]: DEBUG oslo_concurrency.lockutils [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1214.159504] env[65503]: DEBUG oslo_concurrency.lockutils [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1214.159773] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-823a2351-0b70-4151-be05-2e2a34e363b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.166107] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1214.166107] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528dfeb8-9536-23de-f5ab-ab4455e13822" [ 1214.166107] env[65503]: _type = "Task" [ 1214.166107] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.175581] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528dfeb8-9536-23de-f5ab-ab4455e13822, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.678178] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528dfeb8-9536-23de-f5ab-ab4455e13822, 'name': SearchDatastore_Task, 'duration_secs': 0.013897} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.678671] env[65503]: DEBUG oslo_concurrency.lockutils [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.678980] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1214.679343] env[65503]: DEBUG oslo_concurrency.lockutils [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.679545] env[65503]: DEBUG oslo_concurrency.lockutils [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1214.679836] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1214.680331] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4151ff3e-ef30-4745-90a3-f60606ffa8a0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.691730] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1214.692040] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1214.692998] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1425ca14-a9c4-4c67-9423-d42c1a06c3d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.700895] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1214.700895] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52beaa82-5422-5257-5c2c-3864ca6a6557" [ 1214.700895] env[65503]: _type = "Task" [ 1214.700895] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.712197] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52beaa82-5422-5257-5c2c-3864ca6a6557, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.213212] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52beaa82-5422-5257-5c2c-3864ca6a6557, 'name': SearchDatastore_Task, 'duration_secs': 0.012896} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.214220] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f65626b-3252-44ad-8d25-9d6d70228b77 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.220999] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1215.220999] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5294583b-1f79-d899-d894-b4647bddae23" [ 1215.220999] env[65503]: _type = "Task" [ 1215.220999] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.231009] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5294583b-1f79-d899-d894-b4647bddae23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.735515] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5294583b-1f79-d899-d894-b4647bddae23, 'name': SearchDatastore_Task, 'duration_secs': 0.022587} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.735866] env[65503]: DEBUG oslo_concurrency.lockutils [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1215.736224] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 8d4ff5ca-12c6-488a-9a23-495d24e012e7/8d4ff5ca-12c6-488a-9a23-495d24e012e7.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1215.736593] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2519eb12-bc02-4193-81c5-c727a26ceae9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.746392] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1215.746392] env[65503]: value = "task-4450924" [ 1215.746392] env[65503]: _type = "Task" [ 1215.746392] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.756547] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.258511] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450924, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.759618] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.71489} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.760069] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 8d4ff5ca-12c6-488a-9a23-495d24e012e7/8d4ff5ca-12c6-488a-9a23-495d24e012e7.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1216.760154] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1216.760392] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-017e9992-ab43-49b9-8544-e67087ccb6d6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.769192] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1216.769192] env[65503]: value = "task-4450925" [ 1216.769192] env[65503]: _type = "Task" [ 1216.769192] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.778854] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450925, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.279058] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450925, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091032} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.279356] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1217.280172] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69138c1-7dfb-4fcc-a57b-4b55b8b8d405 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.301060] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 8d4ff5ca-12c6-488a-9a23-495d24e012e7/8d4ff5ca-12c6-488a-9a23-495d24e012e7.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1217.301365] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18fde462-7a1d-4cf0-9cd8-1f590356322a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.321526] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1217.321526] env[65503]: value = "task-4450926" [ 1217.321526] env[65503]: _type = "Task" [ 1217.321526] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.330634] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450926, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.834462] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450926, 'name': ReconfigVM_Task, 'duration_secs': 0.38447} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.834887] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 8d4ff5ca-12c6-488a-9a23-495d24e012e7/8d4ff5ca-12c6-488a-9a23-495d24e012e7.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1217.835465] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f8bb4705-0d2f-4ac1-b834-a994455e4f50 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.843454] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1217.843454] env[65503]: value = "task-4450927" [ 1217.843454] env[65503]: _type = "Task" [ 1217.843454] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.853515] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450927, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.354104] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450927, 'name': Rename_Task, 'duration_secs': 0.176661} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.354426] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1218.354680] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b73eb084-8723-408e-b782-6a884924c8cc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.362089] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1218.362089] env[65503]: value = "task-4450928" [ 1218.362089] env[65503]: _type = "Task" [ 1218.362089] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.371210] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450928, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.873594] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450928, 'name': PowerOnVM_Task} progress is 71%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.373346] env[65503]: DEBUG oslo_vmware.api [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450928, 'name': PowerOnVM_Task, 'duration_secs': 0.696026} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.373634] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1219.373841] env[65503]: DEBUG nova.compute.manager [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1219.374711] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bd44ae-e2e7-43cc-89c2-97ff205fbc89 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.892235] env[65503]: DEBUG oslo_concurrency.lockutils [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.892614] env[65503]: DEBUG oslo_concurrency.lockutils [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.892826] env[65503]: DEBUG nova.objects.instance [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1220.178010] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquiring lock "8d4ff5ca-12c6-488a-9a23-495d24e012e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.178406] env[65503]: DEBUG oslo_concurrency.lockutils 
[None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "8d4ff5ca-12c6-488a-9a23-495d24e012e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.178688] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquiring lock "8d4ff5ca-12c6-488a-9a23-495d24e012e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.178931] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "8d4ff5ca-12c6-488a-9a23-495d24e012e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.179127] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "8d4ff5ca-12c6-488a-9a23-495d24e012e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.181233] env[65503]: INFO nova.compute.manager [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Terminating instance [ 1220.684926] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquiring lock "refresh_cache-8d4ff5ca-12c6-488a-9a23-495d24e012e7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.685144] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquired lock "refresh_cache-8d4ff5ca-12c6-488a-9a23-495d24e012e7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.685307] env[65503]: DEBUG nova.network.neutron [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1220.852259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.852609] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.852831] env[65503]: INFO nova.compute.manager [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Shelving [ 1220.901545] env[65503]: DEBUG oslo_concurrency.lockutils [None req-17d97a3d-1645-480c-81b5-6fea4ad57e0b tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.187785] env[65503]: WARNING neutronclient.v2_0.client [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1221.188475] env[65503]: WARNING openstack [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1221.188828] env[65503]: WARNING openstack [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1221.212138] env[65503]: DEBUG nova.network.neutron [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1221.267712] env[65503]: DEBUG nova.network.neutron [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1221.478665] env[65503]: DEBUG oslo_vmware.rw_handles [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f2ad7b-eefd-df4f-e88e-c65cc8a50f0e/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1221.479769] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73184aea-5ff4-48e5-b6d0-51ef841b05b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.486605] env[65503]: DEBUG oslo_vmware.rw_handles [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f2ad7b-eefd-df4f-e88e-c65cc8a50f0e/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1221.486789] env[65503]: ERROR oslo_vmware.rw_handles [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f2ad7b-eefd-df4f-e88e-c65cc8a50f0e/disk-0.vmdk due to incomplete transfer. [ 1221.487038] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0505029e-86ad-47ba-a691-9cce02c8e490 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.495059] env[65503]: DEBUG oslo_vmware.rw_handles [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f2ad7b-eefd-df4f-e88e-c65cc8a50f0e/disk-0.vmdk. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1221.495059] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Uploaded image 22368b12-a541-49dd-98b8-5368ea101029 to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1221.497730] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1221.497992] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-450eb201-f55f-4ba6-b783-9d3b0ba3e335 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.505103] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1221.505103] env[65503]: value = "task-4450929" [ 1221.505103] env[65503]: _type = "Task" [ 1221.505103] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.513990] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450929, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.770640] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Releasing lock "refresh_cache-8d4ff5ca-12c6-488a-9a23-495d24e012e7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1221.771052] env[65503]: DEBUG nova.compute.manager [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1221.771279] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.772196] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-907c3435-b942-4555-bec0-a9478d1ec31f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.781376] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.781771] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01edbb31-a4e0-42f5-a9e6-553acb739344 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.791371] env[65503]: DEBUG oslo_vmware.api [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1221.791371] env[65503]: value = "task-4450930" [ 1221.791371] env[65503]: _type = "Task" [ 1221.791371] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.801308] env[65503]: DEBUG oslo_vmware.api [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.862693] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.863111] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28f25bb0-746f-4e88-bd17-5d7dca856570 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.870663] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1221.870663] env[65503]: value = "task-4450931" [ 1221.870663] env[65503]: _type = "Task" [ 1221.870663] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.881437] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450931, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.019092] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450929, 'name': Destroy_Task, 'duration_secs': 0.409541} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.019514] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Destroyed the VM [ 1222.019890] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1222.020289] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b8589af8-469f-4ef3-9c30-a7f01fd1785b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.028948] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1222.028948] env[65503]: value = "task-4450932" [ 1222.028948] env[65503]: _type = "Task" [ 1222.028948] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.039043] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450932, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.303247] env[65503]: DEBUG oslo_vmware.api [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450930, 'name': PowerOffVM_Task, 'duration_secs': 0.113664} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.303557] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.303732] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.303988] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-967a70d1-256d-418f-bafb-6c8fb273bc18 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.329378] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.329608] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.329786] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Deleting the datastore file [datastore2] 8d4ff5ca-12c6-488a-9a23-495d24e012e7 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.330447] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fcff5d40-5cba-442a-9120-a5d9ceede1b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.338194] env[65503]: DEBUG oslo_vmware.api [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for the task: (returnval){ [ 1222.338194] env[65503]: value = "task-4450934" [ 1222.338194] env[65503]: _type = "Task" [ 1222.338194] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.346661] env[65503]: DEBUG oslo_vmware.api [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450934, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.381556] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450931, 'name': PowerOffVM_Task, 'duration_secs': 0.250615} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.381556] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.382254] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddb05fb-a81b-440f-a64d-2b826a49437c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.402369] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fb3716-789e-44d8-b268-b3b0639ad3a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.539838] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450932, 'name': RemoveSnapshot_Task, 'duration_secs': 0.387396} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.540142] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1222.540415] env[65503]: DEBUG nova.compute.manager [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1222.541242] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0776a857-2d6e-48a5-8bbd-8e00db0585f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.848974] env[65503]: DEBUG oslo_vmware.api [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Task: {'id': task-4450934, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107438} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.849249] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1222.849435] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1222.849607] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1222.849772] env[65503]: INFO nova.compute.manager [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1222.850018] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1222.850218] env[65503]: DEBUG nova.compute.manager [-] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1222.850312] env[65503]: DEBUG nova.network.neutron [-] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1222.850558] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1222.851110] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1222.851367] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1222.872874] env[65503]: DEBUG nova.network.neutron [-] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1222.873135] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1222.914908] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Creating Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1222.915597] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5b6563aa-03ce-4fac-a875-6ae3c08c92db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.924405] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1222.924405] env[65503]: value = "task-4450935" [ 1222.924405] env[65503]: _type = "Task" [ 1222.924405] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.933909] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450935, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.053561] env[65503]: INFO nova.compute.manager [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Shelve offloading [ 1223.375511] env[65503]: DEBUG nova.network.neutron [-] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1223.434450] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450935, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.557366] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1223.557763] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5dd1b2d-05e7-4270-8114-1b38f8b70a37 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.565592] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1223.565592] env[65503]: value = "task-4450936" [ 1223.565592] env[65503]: _type = "Task" [ 1223.565592] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.576077] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450936, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.879070] env[65503]: INFO nova.compute.manager [-] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Took 1.03 seconds to deallocate network for instance. [ 1223.934990] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450935, 'name': CreateSnapshot_Task, 'duration_secs': 0.892749} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.935287] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Created Snapshot of the VM instance {{(pid=65503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1223.936038] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f0baa1-7a38-4b33-b251-62bda59f5f1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.076403] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1224.076814] env[65503]: DEBUG nova.compute.manager [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1224.077363] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11ee1c9-a9d5-4c93-ad00-f2ad2ff792b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.083076] env[65503]: DEBUG oslo_concurrency.lockutils [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.083305] env[65503]: DEBUG oslo_concurrency.lockutils [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.083397] env[65503]: DEBUG nova.network.neutron [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1224.385515] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.385809] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.386036] env[65503]: DEBUG nova.objects.instance [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lazy-loading 'resources' on Instance uuid 8d4ff5ca-12c6-488a-9a23-495d24e012e7 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1224.453617] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Creating linked-clone VM from snapshot {{(pid=65503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1224.453961] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0b19fa1f-4d8a-43f8-ae4b-34f69db8554f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.463940] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1224.463940] env[65503]: value = "task-4450937" [ 1224.463940] env[65503]: _type = "Task" [ 1224.463940] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.472300] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450937, 'name': CloneVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.585903] env[65503]: WARNING neutronclient.v2_0.client [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1224.586852] env[65503]: WARNING openstack [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1224.587277] env[65503]: WARNING openstack [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1224.711314] env[65503]: WARNING openstack [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1224.711802] env[65503]: WARNING openstack [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1224.772015] env[65503]: WARNING neutronclient.v2_0.client [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1224.772710] env[65503]: WARNING openstack [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1224.773084] env[65503]: WARNING openstack [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1224.857943] env[65503]: DEBUG nova.network.neutron [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updating instance_info_cache with network_info: [{"id": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "address": "fa:16:3e:23:5c:c4", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7da6c50-23", "ovs_interfaceid": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1224.973316] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450937, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.987075] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c815fec-7cdf-4627-ac4f-81122918ac8d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.996137] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063b4d80-a5ef-421b-8987-ed5e12217cec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.030751] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0919afb4-1f63-48a3-bcb9-fcaeddc78fb0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.040329] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5c43f0-a6b7-4e1b-a7f8-1300c86c2ff5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.055864] env[65503]: DEBUG nova.compute.provider_tree [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.361118] env[65503]: DEBUG oslo_concurrency.lockutils [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1225.361596] env[65503]: WARNING neutronclient.v2_0.client [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1225.362206] env[65503]: WARNING openstack [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1225.362559] env[65503]: WARNING openstack [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1225.368159] env[65503]: WARNING neutronclient.v2_0.client [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1225.473258] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450937, 'name': CloneVM_Task} progress is 95%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.558894] env[65503]: DEBUG nova.scheduler.client.report [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1225.675597] env[65503]: DEBUG nova.compute.manager [req-72fbc545-07eb-41b9-bd8a-bedf242153d1 req-d40898e6-539c-45dd-b56e-a8b646c9e2ef service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Received event network-vif-unplugged-c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1225.675890] env[65503]: DEBUG oslo_concurrency.lockutils [req-72fbc545-07eb-41b9-bd8a-bedf242153d1 req-d40898e6-539c-45dd-b56e-a8b646c9e2ef service nova] Acquiring lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.676121] env[65503]: DEBUG oslo_concurrency.lockutils [req-72fbc545-07eb-41b9-bd8a-bedf242153d1 req-d40898e6-539c-45dd-b56e-a8b646c9e2ef service nova] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.676317] env[65503]: DEBUG oslo_concurrency.lockutils [req-72fbc545-07eb-41b9-bd8a-bedf242153d1 req-d40898e6-539c-45dd-b56e-a8b646c9e2ef service nova] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.677148] env[65503]: DEBUG nova.compute.manager [req-72fbc545-07eb-41b9-bd8a-bedf242153d1 req-d40898e6-539c-45dd-b56e-a8b646c9e2ef service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] No waiting events found dispatching network-vif-unplugged-c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1225.677344] env[65503]: WARNING nova.compute.manager [req-72fbc545-07eb-41b9-bd8a-bedf242153d1 req-d40898e6-539c-45dd-b56e-a8b646c9e2ef service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Received unexpected event network-vif-unplugged-c7da6c50-239a-487b-91ca-3e82cb4d3794 for instance with vm_state shelved and task_state shelving_offloading. [ 1225.753542] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1225.754821] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb38d0ca-0a71-455d-b8c1-39f29ed9384a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.762759] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1225.763023] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e99b8994-fe2b-4941-a4e1-d762db90a197 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.974013] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450937, 'name': CloneVM_Task} progress is 95%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.999448] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1225.999606] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1225.999791] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleting the datastore file [datastore1] fb2dddac-4ac0-498a-b972-e61255833ad0 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1226.000107] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d16b3ff-ace6-4050-8802-d8cb30b8b282 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.007973] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1226.007973] env[65503]: value = "task-4450939" [ 1226.007973] env[65503]: _type = "Task" [ 1226.007973] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.016918] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.017167] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.017395] env[65503]: INFO nova.compute.manager [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Rebooting instance [ 1226.018889] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450939, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.064831] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.679s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.089676] env[65503]: INFO nova.scheduler.client.report [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Deleted allocations for instance 8d4ff5ca-12c6-488a-9a23-495d24e012e7 [ 1226.475077] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450937, 'name': CloneVM_Task, 'duration_secs': 1.617528} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.475447] env[65503]: INFO nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Created linked-clone VM from snapshot [ 1226.476265] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fddc1631-b870-40bb-82e4-05407e45ff4e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.485024] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Uploading image 07b5b971-b74b-4847-966b-a592b53a880d {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1226.514096] env[65503]: DEBUG oslo_vmware.rw_handles [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1226.514096] env[65503]: value = "vm-870509" [ 1226.514096] env[65503]: _type = "VirtualMachine" [ 1226.514096] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1226.514515] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-cd78a336-1cba-4719-8a3c-bd3c0c2007fb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.524859] env[65503]: DEBUG oslo_vmware.api [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450939, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162362} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.526607] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1226.526842] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1226.527088] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1226.529139] env[65503]: DEBUG oslo_vmware.rw_handles [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lease: (returnval){ [ 1226.529139] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52afbfaa-3e6c-9351-bb8c-04405321f4bd" [ 1226.529139] env[65503]: _type = "HttpNfcLease" [ 1226.529139] env[65503]: } obtained for exporting VM: (result){ [ 1226.529139] env[65503]: value = "vm-870509" [ 1226.529139] env[65503]: _type = "VirtualMachine" [ 1226.529139] env[65503]: }. {{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1226.529357] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the lease: (returnval){ [ 1226.529357] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52afbfaa-3e6c-9351-bb8c-04405321f4bd" [ 1226.529357] env[65503]: _type = "HttpNfcLease" [ 1226.529357] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1226.536383] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1226.536383] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52afbfaa-3e6c-9351-bb8c-04405321f4bd" [ 1226.536383] env[65503]: _type = "HttpNfcLease" [ 1226.536383] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1226.536680] env[65503]: DEBUG oslo_vmware.rw_handles [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1226.536680] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52afbfaa-3e6c-9351-bb8c-04405321f4bd" [ 1226.536680] env[65503]: _type = "HttpNfcLease" [ 1226.536680] env[65503]: }. 
{{(pid=65503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1226.537457] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51380666-ec57-4b46-97d3-e5f908e793c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.546153] env[65503]: DEBUG oslo_vmware.rw_handles [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cdd986-bcfd-1710-0aef-e9593ff6e798/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1226.546468] env[65503]: DEBUG oslo_vmware.rw_handles [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cdd986-bcfd-1710-0aef-e9593ff6e798/disk-0.vmdk for reading. {{(pid=65503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1226.548425] env[65503]: INFO nova.scheduler.client.report [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleted allocations for instance fb2dddac-4ac0-498a-b972-e61255833ad0 [ 1226.552411] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.552582] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1226.552750] env[65503]: DEBUG nova.network.neutron [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1226.610259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6fb6c300-52fa-4b74-b8f8-0848bc0178df tempest-ServerShowV257Test-1214169527 tempest-ServerShowV257Test-1214169527-project-member] Lock "8d4ff5ca-12c6-488a-9a23-495d24e012e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.432s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.642931] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-07c36f0e-893f-465f-b8d5-51020faf284c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.053045] env[65503]: DEBUG oslo_concurrency.lockutils [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 
tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.053902] env[65503]: DEBUG oslo_concurrency.lockutils [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.053996] env[65503]: DEBUG nova.objects.instance [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lazy-loading 'resources' on Instance uuid fb2dddac-4ac0-498a-b972-e61255833ad0 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1227.055745] env[65503]: WARNING neutronclient.v2_0.client [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1227.056196] env[65503]: WARNING openstack [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1227.056876] env[65503]: WARNING openstack [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1227.065986] env[65503]: DEBUG nova.objects.instance [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lazy-loading 'numa_topology' on Instance uuid fb2dddac-4ac0-498a-b972-e61255833ad0 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1227.194810] env[65503]: WARNING openstack [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1227.195382] env[65503]: WARNING openstack [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option 
valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1227.272695] env[65503]: WARNING neutronclient.v2_0.client [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1227.273493] env[65503]: WARNING openstack [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1227.273932] env[65503]: WARNING openstack [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1227.376117] env[65503]: DEBUG nova.network.neutron [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance_info_cache with network_info: [{"id": "038e2362-a15e-4f40-bbd0-5289c014118b", "address": "fa:16:3e:12:d8:4b", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038e2362-a1", "ovs_interfaceid": "038e2362-a15e-4f40-bbd0-5289c014118b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1227.568623] env[65503]: DEBUG nova.objects.base [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1227.647439] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c06581-9e08-4a45-9659-66cc0a02582b {{(pid=65503) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.657291] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20bb74e-9615-459a-a776-6779126fee8a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.691734] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e21e48f-7a36-45f9-8a89-9a1ac64caea9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.703404] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186a326b-3d1c-4ead-8e39-a48225c7c898 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.714108] env[65503]: DEBUG nova.compute.manager [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Received event network-changed-c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1227.714495] env[65503]: DEBUG nova.compute.manager [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Refreshing instance network info cache due to event network-changed-c7da6c50-239a-487b-91ca-3e82cb4d3794. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1227.714739] env[65503]: DEBUG oslo_concurrency.lockutils [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] Acquiring lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.715032] env[65503]: DEBUG oslo_concurrency.lockutils [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] Acquired lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.715363] env[65503]: DEBUG nova.network.neutron [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Refreshing network info cache for port c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1227.727634] env[65503]: DEBUG nova.compute.provider_tree [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1227.882486] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.230765] env[65503]: WARNING neutronclient.v2_0.client [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 
req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1228.231633] env[65503]: WARNING openstack [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1228.232025] env[65503]: WARNING openstack [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1228.241405] env[65503]: DEBUG nova.scheduler.client.report [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1228.361388] env[65503]: WARNING openstack [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1228.362162] env[65503]: WARNING openstack [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1228.387453] env[65503]: DEBUG nova.compute.manager [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1228.388479] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb91ce0-47ae-458c-ad0a-4b619ec7739d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.444055] env[65503]: WARNING neutronclient.v2_0.client [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
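The inventory payload logged just above for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 carries a total, a reserved amount and an allocation ratio per resource class. As a hedged illustration only (the helper name and the (total - reserved) * allocation_ratio convention are assumptions about how Placement consumes these numbers, not code quoted from Nova), a minimal Python sketch of the schedulable capacity implied by those values:

# Minimal sketch: derive schedulable capacity from the inventory payload
# logged by nova.scheduler.client.report above. The formula
# (total - reserved) * allocation_ratio follows the common Placement
# convention; treat it as an illustration, not Nova's exact code path.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    """Return {resource_class: schedulable amount} for an inventory dict."""
    return {
        rc: (data['total'] - data['reserved']) * data['allocation_ratio']
        for rc, data in inv.items()
    }

if __name__ == '__main__':
    for rc, amount in effective_capacity(inventory).items():
        print(f'{rc}: {amount:g}')
    # With the values above this prints:
    #   VCPU: 192        (48 * 4.0)
    #   MEMORY_MB: 196078
    #   DISK_GB: 200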
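The recurring "Acquiring lock" / "acquired ... waited" / '"released" ... held' triplets earlier in this section (for example the "compute_resources" lock taken by ResourceTracker.update_usage) are emitted by oslo.concurrency's lock wrapper. A minimal sketch of the same pattern, assuming oslo.concurrency is installed; the function body and sleep are placeholders rather than Nova's resource-tracker code:

import time
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    # Only one thread in this process holds the 'compute_resources' lock
    # at a time; with debug logging enabled, oslo.concurrency emits
    # acquire/wait/held lines like the ones in this log.
    time.sleep(0.1)                      # placeholder for tracker work
    return instance_uuid

update_usage('fb2dddac-4ac0-498a-b972-e61255833ad0')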
[ 1228.444813] env[65503]: WARNING openstack [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1228.445201] env[65503]: WARNING openstack [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1228.556979] env[65503]: DEBUG nova.network.neutron [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updated VIF entry in instance network info cache for port c7da6c50-239a-487b-91ca-3e82cb4d3794. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1228.557168] env[65503]: DEBUG nova.network.neutron [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updating instance_info_cache with network_info: [{"id": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "address": "fa:16:3e:23:5c:c4", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc7da6c50-23", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1228.748092] env[65503]: DEBUG oslo_concurrency.lockutils [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.694s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.061087] env[65503]: DEBUG oslo_concurrency.lockutils [req-2b4b6b3f-df21-4d65-9212-8df163ef2322 req-84ca9a23-90bd-47e4-8c1f-fcdb000e9a76 service nova] Releasing lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.197897] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 
tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "fb2dddac-4ac0-498a-b972-e61255833ad0" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.226353] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.226686] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.255806] env[65503]: DEBUG oslo_concurrency.lockutils [None req-119be1c9-b446-4eb8-8e5a-46c3544ca326 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.121s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.256504] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.059s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.256852] env[65503]: INFO nova.compute.manager [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Unshelving [ 1229.411429] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeae33c8-0e7f-4703-b312-12d0f3c21ad3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.420767] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Doing hard reboot of VM {{(pid=65503) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1229.421058] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-e80f00a4-cdfa-4cea-92b9-eea5867d9111 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.429080] env[65503]: DEBUG oslo_vmware.api [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1229.429080] env[65503]: value = 
"task-4450941" [ 1229.429080] env[65503]: _type = "Task" [ 1229.429080] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.439360] env[65503]: DEBUG oslo_vmware.api [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450941, 'name': ResetVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.729977] env[65503]: INFO nova.compute.manager [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Detaching volume 94222922-f7c9-48bc-96b8-a28f0f9ddbbe [ 1229.769255] env[65503]: INFO nova.virt.block_device [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Attempting to driver detach volume 94222922-f7c9-48bc-96b8-a28f0f9ddbbe from mountpoint /dev/sdb [ 1229.769553] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Volume detach. Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1229.769693] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870498', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'name': 'volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad', 'attached_at': '', 'detached_at': '', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'serial': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1229.770648] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b17e5a-1685-4f96-bc6a-bfe9df99ee6f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.794728] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7563ef06-2d90-442e-abff-3102143679c3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.804190] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b146d66-482d-487b-ba41-d7524d01e478 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.827998] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0858e1-9857-432b-8199-308ec46fce6e {{(pid=65503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.847390] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The volume has not been displaced from its original location: [datastore1] volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe/volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe.vmdk. No consolidation needed. {{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1229.853057] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1229.853482] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcf46fa8-96a8-4554-92dd-47c1dbaad095 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.873945] env[65503]: DEBUG oslo_vmware.api [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1229.873945] env[65503]: value = "task-4450942" [ 1229.873945] env[65503]: _type = "Task" [ 1229.873945] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.884207] env[65503]: DEBUG oslo_vmware.api [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450942, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.939974] env[65503]: DEBUG oslo_vmware.api [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450941, 'name': ResetVM_Task, 'duration_secs': 0.105304} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.940265] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Did hard reboot of VM {{(pid=65503) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1229.940456] env[65503]: DEBUG nova.compute.manager [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1229.941322] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb503a18-828e-4235-a6f0-9ae1bc87db93 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.284957] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.285290] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1230.285496] env[65503]: DEBUG nova.objects.instance [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lazy-loading 'pci_requests' on Instance uuid fb2dddac-4ac0-498a-b972-e61255833ad0 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.385530] env[65503]: DEBUG oslo_vmware.api [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450942, 'name': ReconfigVM_Task, 'duration_secs': 0.28524} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.385861] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1230.391299] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1bb2c77-7faa-49bf-93bb-d092dbb19e82 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.409070] env[65503]: DEBUG oslo_vmware.api [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1230.409070] env[65503]: value = "task-4450943" [ 1230.409070] env[65503]: _type = "Task" [ 1230.409070] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.418817] env[65503]: DEBUG oslo_vmware.api [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450943, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.455377] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5649b91b-4d1a-4a05-aa1a-7e50b00f80c2 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.438s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.789969] env[65503]: DEBUG nova.objects.instance [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lazy-loading 'numa_topology' on Instance uuid fb2dddac-4ac0-498a-b972-e61255833ad0 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.923035] env[65503]: DEBUG oslo_vmware.api [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450943, 'name': ReconfigVM_Task, 'duration_secs': 0.165708} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.923479] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870498', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'name': 'volume-94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad', 'attached_at': '', 'detached_at': '', 'volume_id': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe', 'serial': '94222922-f7c9-48bc-96b8-a28f0f9ddbbe'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1231.293083] env[65503]: INFO nova.compute.claims [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1231.475815] env[65503]: DEBUG nova.objects.instance [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lazy-loading 'flavor' on Instance uuid afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1232.377584] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027a7181-1f55-4e53-bab3-3252bb57f03b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.386012] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edf8f33-971c-4d5f-b5ff-4cc3857c0ef7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.418268] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bbace8-c6fb-43c2-8259-548561fcab07 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.426851] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f265f07b-ecc4-4c39-a2c4-5e83adc13692 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.441238] env[65503]: DEBUG nova.compute.provider_tree [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1232.483237] env[65503]: DEBUG oslo_concurrency.lockutils [None req-b8e2710b-382c-4395-b6d3-0537ca6c4aa6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.256s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.944659] env[65503]: DEBUG nova.scheduler.client.report [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1233.451153] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.166s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.451546] env[65503]: WARNING neutronclient.v2_0.client [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1233.494049] env[65503]: INFO nova.network.neutron [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updating port c7da6c50-239a-487b-91ca-3e82cb4d3794 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1233.502634] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.502906] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.503138] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.503329] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 
tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.503494] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.505688] env[65503]: INFO nova.compute.manager [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Terminating instance [ 1233.837222] env[65503]: DEBUG oslo_vmware.rw_handles [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cdd986-bcfd-1710-0aef-e9593ff6e798/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1233.838692] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6501dea-283c-4935-b3b2-b852f1af5458 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.848132] env[65503]: DEBUG oslo_vmware.rw_handles [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cdd986-bcfd-1710-0aef-e9593ff6e798/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1233.848421] env[65503]: ERROR oslo_vmware.rw_handles [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cdd986-bcfd-1710-0aef-e9593ff6e798/disk-0.vmdk due to incomplete transfer. [ 1233.848781] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6f8d5c65-c2a3-4d56-a4ee-2377591d2d98 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.857848] env[65503]: DEBUG oslo_vmware.rw_handles [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cdd986-bcfd-1710-0aef-e9593ff6e798/disk-0.vmdk. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1233.858095] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Uploaded image 07b5b971-b74b-4847-966b-a592b53a880d to the Glance image server {{(pid=65503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1233.860593] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Destroying the VM {{(pid=65503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1233.860848] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7ec9bd19-7b50-40b3-98d8-5a08d1c97ef7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.868167] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1233.868167] env[65503]: value = "task-4450944" [ 1233.868167] env[65503]: _type = "Task" [ 1233.868167] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.876826] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450944, 'name': Destroy_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.010014] env[65503]: DEBUG nova.compute.manager [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1234.010353] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1234.011358] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cccecd-a3f4-4305-a88f-194a7d3bf22a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.019260] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1234.019537] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-633e0aef-a271-41fd-9a73-66a35ddaf882 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.026930] env[65503]: DEBUG oslo_vmware.api [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1234.026930] env[65503]: value = "task-4450945" [ 1234.026930] env[65503]: _type = "Task" [ 1234.026930] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.037731] env[65503]: DEBUG oslo_vmware.api [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450945, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.379457] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450944, 'name': Destroy_Task, 'duration_secs': 0.427546} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.379772] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Destroyed the VM [ 1234.380055] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Deleting Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1234.380338] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-95c27d01-efb7-42d2-b792-fe7f7726527d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.387513] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1234.387513] env[65503]: value = "task-4450946" [ 1234.387513] env[65503]: _type = "Task" [ 1234.387513] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.397310] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450946, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.537195] env[65503]: DEBUG oslo_vmware.api [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450945, 'name': PowerOffVM_Task, 'duration_secs': 0.227008} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.537568] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1234.537568] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1234.537917] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-843e585a-5699-4b5c-8040-afeac46914ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.624961] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1234.625201] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1234.625404] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleting the datastore file [datastore1] afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1234.625712] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-354867e8-e16f-4781-b1cd-d6274e5e9999 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.633367] env[65503]: DEBUG oslo_vmware.api [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1234.633367] env[65503]: value = "task-4450948" [ 1234.633367] env[65503]: _type = "Task" [ 1234.633367] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.642339] env[65503]: DEBUG oslo_vmware.api [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450948, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.900176] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450946, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.971544] env[65503]: DEBUG nova.compute.manager [req-3ad3f0ed-afa8-46ad-9b52-101541f85c33 req-6d8f3ef5-4a51-4af7-bbf2-ffb231888822 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Received event network-vif-plugged-c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1234.971834] env[65503]: DEBUG oslo_concurrency.lockutils [req-3ad3f0ed-afa8-46ad-9b52-101541f85c33 req-6d8f3ef5-4a51-4af7-bbf2-ffb231888822 service nova] Acquiring lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.972551] env[65503]: DEBUG oslo_concurrency.lockutils [req-3ad3f0ed-afa8-46ad-9b52-101541f85c33 req-6d8f3ef5-4a51-4af7-bbf2-ffb231888822 service nova] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.972551] env[65503]: DEBUG oslo_concurrency.lockutils [req-3ad3f0ed-afa8-46ad-9b52-101541f85c33 req-6d8f3ef5-4a51-4af7-bbf2-ffb231888822 service nova] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.972551] env[65503]: DEBUG nova.compute.manager [req-3ad3f0ed-afa8-46ad-9b52-101541f85c33 req-6d8f3ef5-4a51-4af7-bbf2-ffb231888822 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] No waiting events found dispatching network-vif-plugged-c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1234.972808] env[65503]: WARNING nova.compute.manager [req-3ad3f0ed-afa8-46ad-9b52-101541f85c33 req-6d8f3ef5-4a51-4af7-bbf2-ffb231888822 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Received unexpected event network-vif-plugged-c7da6c50-239a-487b-91ca-3e82cb4d3794 for instance with vm_state shelved_offloaded and task_state spawning. 
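The Task blocks above ("Waiting for the task: ... task-4450946 ... to complete", the "_poll_task ... progress is 0% / 100%" entries, and the eventual "completed successfully") show oslo.vmware polling a vCenter task until it finishes. A self-contained sketch of that poll-until-done pattern; TaskStub and the poll interval are invented stand-ins, not oslo.vmware's real objects:

# Minimal sketch of a poll-until-complete loop in the spirit of the
# oslo.vmware task waits logged above. TaskStub is a stand-in: a real
# VMware Task would be queried through the vSphere API instead.
import itertools
import time


class TaskStub:
    """Fake task that reports increasing progress, then success."""
    def __init__(self, name):
        self.name = name
        self._progress = itertools.chain([0, 45, 100], itertools.repeat(100))

    def poll(self):
        progress = next(self._progress)
        state = 'success' if progress == 100 else 'running'
        return state, progress


def wait_for_task(task, interval=0.1):
    """Poll the task until it reports success, logging progress."""
    while True:
        state, progress = task.poll()
        print(f"Task {task.name}: {state}, progress {progress}%")
        if state == 'success':
            return
        time.sleep(interval)


if __name__ == '__main__':
    wait_for_task(TaskStub('task-4450946'))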
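The "Received event network-vif-plugged-..." / "No waiting events found dispatching ..." pair above reflects how an external Neutron event is only delivered to a waiter that registered for that exact instance/event key beforehand; otherwise it is merely logged. A simplified stand-in for that dispatch pattern (the class and method names here are illustrative, not Nova's actual InstanceEvents implementation):

# Simplified sketch of waiter registration vs. event dispatch behind the
# "No waiting events found dispatching network-vif-plugged-..." warning.
import threading


class InstanceEvents:
    def __init__(self):
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        ev = threading.Event()
        self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print(f'No waiting events found dispatching {event_name}')
            return
        ev.set()


if __name__ == '__main__':
    events = InstanceEvents()
    uuid = 'fb2dddac-4ac0-498a-b972-e61255833ad0'

    # Unexpected event: nothing registered, so it is only logged.
    events.dispatch(uuid, 'network-vif-plugged-c7da6c50')

    # Expected event: a waiter was registered first, so it is woken up.
    waiter = events.prepare_for_event(uuid, 'network-vif-plugged-c7da6c50')
    events.dispatch(uuid, 'network-vif-plugged-c7da6c50')
    print('waiter satisfied:', waiter.is_set())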
[ 1235.031577] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.031740] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.031921] env[65503]: DEBUG nova.network.neutron [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1235.143581] env[65503]: DEBUG oslo_vmware.api [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.359197} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.143835] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1235.144056] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1235.144238] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1235.144409] env[65503]: INFO nova.compute.manager [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1235.144653] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1235.144894] env[65503]: DEBUG nova.compute.manager [-] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1235.144991] env[65503]: DEBUG nova.network.neutron [-] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1235.145256] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1235.145786] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1235.146064] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1235.183353] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1235.399845] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450946, 'name': RemoveSnapshot_Task, 'duration_secs': 0.6088} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.400213] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Deleted Snapshot of the VM instance {{(pid=65503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1235.400550] env[65503]: DEBUG nova.compute.manager [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1235.401491] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f66ce5c-1a24-4a90-8a75-26f4495c8ea2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.535138] env[65503]: WARNING neutronclient.v2_0.client [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
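The loopingcall entry above waits for _deallocate_network_with_retries to return, i.e. the network deallocation step is wrapped in a retry loop so a transient Neutron failure does not leak the allocation. A simplified stand-in for that shape; the attempt count, delay and the flaky_deallocate stub are assumptions for the sketch, not Nova's actual retry policy:

# Simplified stand-in for the retry-wrapped network deallocation the
# loopingcall entry above is waiting on.
import time


def deallocate_with_retries(deallocate, attempts=3, delay=0.2):
    """Run deallocate(), retrying on failure up to `attempts` times."""
    for attempt in range(1, attempts + 1):
        try:
            deallocate()
            print(f'deallocation succeeded on attempt {attempt}')
            return
        except RuntimeError as exc:   # transient failure in this sketch
            print(f'attempt {attempt} failed: {exc}')
            if attempt == attempts:
                raise
            time.sleep(delay)


if __name__ == '__main__':
    calls = {'n': 0}

    def flaky_deallocate():
        calls['n'] += 1
        if calls['n'] < 2:            # fail once, then succeed
            raise RuntimeError('neutron temporarily unavailable')

    deallocate_with_retries(flaky_deallocate)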
[ 1235.535878] env[65503]: WARNING openstack [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1235.536237] env[65503]: WARNING openstack [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1235.659522] env[65503]: WARNING openstack [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1235.659941] env[65503]: WARNING openstack [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1235.724142] env[65503]: WARNING neutronclient.v2_0.client [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1235.724852] env[65503]: WARNING openstack [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1235.725211] env[65503]: WARNING openstack [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1235.818063] env[65503]: DEBUG nova.network.neutron [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updating instance_info_cache with network_info: [{"id": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "address": "fa:16:3e:23:5c:c4", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7da6c50-23", "ovs_interfaceid": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1235.915769] env[65503]: INFO nova.compute.manager [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Shelve offloading [ 1236.147554] env[65503]: DEBUG nova.network.neutron [-] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1236.321994] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.351546] env[65503]: DEBUG 
nova.virt.hardware [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='85a31ad6573accc5446a97f65fc58d20',container_format='bare',created_at=2025-11-14T15:57:56Z,direct_url=,disk_format='vmdk',id=22368b12-a541-49dd-98b8-5368ea101029,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1646701057-shelved',owner='592efb180976432cbcecb9ad421e1bd1',properties=ImageMetaProps,protected=,size=31662592,status='active',tags=,updated_at=2025-11-14T15:58:11Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1236.351787] env[65503]: DEBUG nova.virt.hardware [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1236.351942] env[65503]: DEBUG nova.virt.hardware [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1236.352127] env[65503]: DEBUG nova.virt.hardware [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1236.352268] env[65503]: DEBUG nova.virt.hardware [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1236.352469] env[65503]: DEBUG nova.virt.hardware [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1236.352604] env[65503]: DEBUG nova.virt.hardware [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1236.352751] env[65503]: DEBUG nova.virt.hardware [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1236.352911] env[65503]: DEBUG nova.virt.hardware [None req-519d86fc-4108-41a5-a95b-386b737303d6 
tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1236.353074] env[65503]: DEBUG nova.virt.hardware [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1236.353243] env[65503]: DEBUG nova.virt.hardware [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1236.354124] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34a5ffc-6949-4a6d-8a47-11fd173664cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.362921] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b3c359-570c-4807-b268-723f76ad45f3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.377825] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:5c:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7da6c50-239a-487b-91ca-3e82cb4d3794', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1236.386056] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1236.386413] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1236.386709] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-541bbe43-d21d-4b4d-884a-a1c39753a334 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.407910] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1236.407910] env[65503]: value = "task-4450949" [ 1236.407910] env[65503]: _type = "Task" [ 1236.407910] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.417840] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450949, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.419585] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1236.420330] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-efabe0ee-58ac-4525-9fba-62fbeae44b64 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.428864] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1236.428864] env[65503]: value = "task-4450950" [ 1236.428864] env[65503]: _type = "Task" [ 1236.428864] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.440333] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] VM already powered off {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1236.440566] env[65503]: DEBUG nova.compute.manager [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1236.441423] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b525c6b3-58fb-4423-aa1a-3754c68fda5b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.449259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.449477] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.449628] env[65503]: DEBUG nova.network.neutron [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1236.650817] env[65503]: INFO nova.compute.manager [-] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Took 1.51 seconds to deallocate network for instance. 
[ 1236.919076] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450949, 'name': CreateVM_Task, 'duration_secs': 0.304021} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.919437] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1236.919689] env[65503]: WARNING neutronclient.v2_0.client [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1236.920344] env[65503]: WARNING openstack [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1236.920687] env[65503]: WARNING openstack [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1236.995050] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.995050] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "[datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.995050] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1236.995796] env[65503]: WARNING neutronclient.v2_0.client [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1236.996414] env[65503]: WARNING openstack [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1236.996801] env[65503]: WARNING openstack [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1237.003407] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df45143e-0def-4ae6-b8b0-f557a4720592 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.007954] env[65503]: DEBUG nova.compute.manager [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Received event network-changed-c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1237.008210] env[65503]: DEBUG nova.compute.manager [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Refreshing instance network info cache due to event network-changed-c7da6c50-239a-487b-91ca-3e82cb4d3794. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1237.008428] env[65503]: DEBUG oslo_concurrency.lockutils [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] Acquiring lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.008563] env[65503]: DEBUG oslo_concurrency.lockutils [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] Acquired lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.008713] env[65503]: DEBUG nova.network.neutron [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Refreshing network info cache for port c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1237.014347] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1237.014347] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52741931-d8e6-d1b0-23cc-18bd2be568e7" [ 1237.014347] env[65503]: _type = "Task" [ 1237.014347] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.024510] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52741931-d8e6-d1b0-23cc-18bd2be568e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.116628] env[65503]: WARNING openstack [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1237.117073] env[65503]: WARNING openstack [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1237.157421] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.157704] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.157916] env[65503]: DEBUG nova.objects.instance [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lazy-loading 'resources' on Instance uuid afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1237.176216] env[65503]: WARNING neutronclient.v2_0.client [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1237.176890] env[65503]: WARNING openstack [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1237.177248] env[65503]: WARNING openstack [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1237.259477] env[65503]: DEBUG nova.network.neutron [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updating instance_info_cache with network_info: [{"id": "f896d088-0ab2-44cc-a26c-6593c073467c", "address": "fa:16:3e:79:ba:09", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf896d088-0a", "ovs_interfaceid": "f896d088-0ab2-44cc-a26c-6593c073467c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1237.511534] env[65503]: WARNING neutronclient.v2_0.client [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1237.512258] env[65503]: WARNING openstack [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1237.512659] env[65503]: WARNING openstack [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1237.532846] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.533112] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Processing image 22368b12-a541-49dd-98b8-5368ea101029 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1237.533366] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029/22368b12-a541-49dd-98b8-5368ea101029.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.533545] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquired lock "[datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029/22368b12-a541-49dd-98b8-5368ea101029.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.533719] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1237.533966] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86dd9bec-8814-41f8-a39a-be088a3f4dcc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.543583] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1237.543767] env[65503]: 
DEBUG nova.virt.vmwareapi.vmops [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1237.544544] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cedf24d6-266c-4ab2-bac1-58d2946a2190 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.550639] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1237.550639] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5219a398-1c52-a638-6e86-256540ea13d0" [ 1237.550639] env[65503]: _type = "Task" [ 1237.550639] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.559100] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5219a398-1c52-a638-6e86-256540ea13d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.632802] env[65503]: WARNING openstack [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1237.633223] env[65503]: WARNING openstack [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1237.703511] env[65503]: WARNING neutronclient.v2_0.client [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1237.704171] env[65503]: WARNING openstack [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1237.704518] env[65503]: WARNING openstack [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1237.755180] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10991329-7b21-4b3c-8b73-3892ab237bb2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.762929] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.763354] env[65503]: WARNING neutronclient.v2_0.client [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1237.763965] env[65503]: WARNING openstack [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1237.764333] env[65503]: WARNING openstack [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1237.769323] env[65503]: WARNING neutronclient.v2_0.client [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1237.772128] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003276e8-a9a2-4b29-af4a-2e2c16de5e6c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.809057] env[65503]: DEBUG nova.network.neutron [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updated VIF entry in instance network info cache for port c7da6c50-239a-487b-91ca-3e82cb4d3794. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1237.809463] env[65503]: DEBUG nova.network.neutron [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updating instance_info_cache with network_info: [{"id": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "address": "fa:16:3e:23:5c:c4", "network": {"id": "997076b2-693b-4d42-9279-626006ea5699", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1015296409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "592efb180976432cbcecb9ad421e1bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7da6c50-23", "ovs_interfaceid": "c7da6c50-239a-487b-91ca-3e82cb4d3794", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1237.811184] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361431dd-8332-4c29-9a78-bbb533cf9d76 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.820736] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a16ab7-659f-470b-9d0e-7889764df9be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.837229] env[65503]: DEBUG nova.compute.provider_tree [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1238.016380] env[65503]: DEBUG nova.compute.manager [req-07652691-a853-43e5-8e77-3529c5a7851d req-d890c28d-ea98-4547-8309-35d2e6aa3f5e service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Received event network-vif-unplugged-f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 
1238.016380] env[65503]: DEBUG oslo_concurrency.lockutils [req-07652691-a853-43e5-8e77-3529c5a7851d req-d890c28d-ea98-4547-8309-35d2e6aa3f5e service nova] Acquiring lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.016710] env[65503]: DEBUG oslo_concurrency.lockutils [req-07652691-a853-43e5-8e77-3529c5a7851d req-d890c28d-ea98-4547-8309-35d2e6aa3f5e service nova] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.016822] env[65503]: DEBUG oslo_concurrency.lockutils [req-07652691-a853-43e5-8e77-3529c5a7851d req-d890c28d-ea98-4547-8309-35d2e6aa3f5e service nova] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.016994] env[65503]: DEBUG nova.compute.manager [req-07652691-a853-43e5-8e77-3529c5a7851d req-d890c28d-ea98-4547-8309-35d2e6aa3f5e service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] No waiting events found dispatching network-vif-unplugged-f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1238.017180] env[65503]: WARNING nova.compute.manager [req-07652691-a853-43e5-8e77-3529c5a7851d req-d890c28d-ea98-4547-8309-35d2e6aa3f5e service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Received unexpected event network-vif-unplugged-f896d088-0ab2-44cc-a26c-6593c073467c for instance with vm_state shelved and task_state shelving_offloading. 
[ 1238.062213] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Preparing fetch location {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1238.062573] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Fetch image to [datastore2] OSTACK_IMG_acb0e49c-e87d-4a19-aeca-a7d006f542cc/OSTACK_IMG_acb0e49c-e87d-4a19-aeca-a7d006f542cc.vmdk {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1238.062769] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Downloading stream optimized image 22368b12-a541-49dd-98b8-5368ea101029 to [datastore2] OSTACK_IMG_acb0e49c-e87d-4a19-aeca-a7d006f542cc/OSTACK_IMG_acb0e49c-e87d-4a19-aeca-a7d006f542cc.vmdk on the data store datastore2 as vApp {{(pid=65503) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1238.062943] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Downloading image file data 22368b12-a541-49dd-98b8-5368ea101029 to the ESX as VM named 'OSTACK_IMG_acb0e49c-e87d-4a19-aeca-a7d006f542cc' {{(pid=65503) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1238.107948] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1238.109137] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6069749b-a6e7-4ad0-9441-bd30666b5579 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.116951] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1238.118956] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08557cee-df1d-43d4-8650-8459ff6a672c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.141123] env[65503]: DEBUG oslo_vmware.rw_handles [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1238.141123] env[65503]: value = "resgroup-9" [ 1238.141123] env[65503]: _type = "ResourcePool" [ 
1238.141123] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1238.141441] env[65503]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-1d04842b-c316-43c5-aea6-ebb73f11174a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.164687] env[65503]: DEBUG oslo_vmware.rw_handles [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lease: (returnval){ [ 1238.164687] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524a0054-64f4-c99e-52ef-f5689490dad6" [ 1238.164687] env[65503]: _type = "HttpNfcLease" [ 1238.164687] env[65503]: } obtained for vApp import into resource pool (val){ [ 1238.164687] env[65503]: value = "resgroup-9" [ 1238.164687] env[65503]: _type = "ResourcePool" [ 1238.164687] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1238.165075] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the lease: (returnval){ [ 1238.165075] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524a0054-64f4-c99e-52ef-f5689490dad6" [ 1238.165075] env[65503]: _type = "HttpNfcLease" [ 1238.165075] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1238.172722] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1238.172722] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524a0054-64f4-c99e-52ef-f5689490dad6" [ 1238.172722] env[65503]: _type = "HttpNfcLease" [ 1238.172722] env[65503]: } is initializing. 
{{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1238.191190] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1238.191450] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1238.191833] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleting the datastore file [datastore1] 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1238.192258] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96fb9086-a65d-4514-85f1-24913ff42320 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.200475] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1238.200475] env[65503]: value = "task-4450953" [ 1238.200475] env[65503]: _type = "Task" [ 1238.200475] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.210241] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450953, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.316090] env[65503]: DEBUG oslo_concurrency.lockutils [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] Releasing lock "refresh_cache-fb2dddac-4ac0-498a-b972-e61255833ad0" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.316362] env[65503]: DEBUG nova.compute.manager [req-1dd1030c-1163-4058-9de8-908be12797ec req-c6625758-e9aa-4bd9-82be-452c4fc1f7dd service nova] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Received event network-vif-deleted-409dc91c-742d-4c6a-ac08-bcf3fcb7e5d2 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1238.340875] env[65503]: DEBUG nova.scheduler.client.report [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1238.673561] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1238.673561] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524a0054-64f4-c99e-52ef-f5689490dad6" [ 1238.673561] env[65503]: _type = "HttpNfcLease" [ 1238.673561] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1238.673900] env[65503]: DEBUG oslo_vmware.rw_handles [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1238.673900] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524a0054-64f4-c99e-52ef-f5689490dad6" [ 1238.673900] env[65503]: _type = "HttpNfcLease" [ 1238.673900] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1238.674653] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7766f8-657d-4165-853f-8fd2b7edde52 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.682405] env[65503]: DEBUG oslo_vmware.rw_handles [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5263e03e-d22a-0136-4a7f-23ad1f53e836/disk-0.vmdk from lease info. 
{{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1238.682585] env[65503]: DEBUG oslo_vmware.rw_handles [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating HTTP connection to write to file with size = 31662592 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5263e03e-d22a-0136-4a7f-23ad1f53e836/disk-0.vmdk. {{(pid=65503) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1238.749684] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8db4c39f-dc4c-4751-9653-8e9ce8008a74 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.751044] env[65503]: DEBUG oslo_vmware.api [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450953, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147208} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.752535] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1238.752728] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1238.752904] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1238.778035] env[65503]: INFO nova.scheduler.client.report [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleted allocations for instance 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 [ 1238.846528] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.689s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.864898] env[65503]: INFO nova.scheduler.client.report [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted allocations for instance afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad [ 1239.283920] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1239.284475] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1239.284676] env[65503]: DEBUG nova.objects.instance [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'resources' on Instance uuid 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.373887] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd10577c-3410-49fe-9235-c4e6a52236f6 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.871s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.787989] env[65503]: DEBUG nova.objects.instance [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'numa_topology' on Instance uuid 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.880880] env[65503]: DEBUG oslo_vmware.rw_handles [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Completed reading data from the image iterator. {{(pid=65503) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1239.881278] env[65503]: DEBUG oslo_vmware.rw_handles [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5263e03e-d22a-0136-4a7f-23ad1f53e836/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1239.882390] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140a71e0-f692-477b-872f-946b96312a2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.892126] env[65503]: DEBUG oslo_vmware.rw_handles [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5263e03e-d22a-0136-4a7f-23ad1f53e836/disk-0.vmdk is in state: ready. 
{{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1239.892381] env[65503]: DEBUG oslo_vmware.rw_handles [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5263e03e-d22a-0136-4a7f-23ad1f53e836/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1239.892803] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-cb452afe-b2cb-4a1a-84be-8d620faa8054 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.050440] env[65503]: DEBUG nova.compute.manager [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Received event network-changed-f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1240.050702] env[65503]: DEBUG nova.compute.manager [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Refreshing instance network info cache due to event network-changed-f896d088-0ab2-44cc-a26c-6593c073467c. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1240.050949] env[65503]: DEBUG oslo_concurrency.lockutils [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] Acquiring lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.051378] env[65503]: DEBUG oslo_concurrency.lockutils [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] Acquired lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.051566] env[65503]: DEBUG nova.network.neutron [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Refreshing network info cache for port f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1240.099228] env[65503]: DEBUG oslo_vmware.rw_handles [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5263e03e-d22a-0136-4a7f-23ad1f53e836/disk-0.vmdk. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1240.099572] env[65503]: INFO nova.virt.vmwareapi.images [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Downloaded image file data 22368b12-a541-49dd-98b8-5368ea101029 [ 1240.100385] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203c1934-60cb-4620-98d7-593d8c2eb025 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.120378] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c52caf93-6441-4612-8e09-816b9cd3b381 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.146416] env[65503]: INFO nova.virt.vmwareapi.images [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] The imported VM was unregistered [ 1240.149168] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Caching image {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1240.149410] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Creating directory with path [datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029 {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1240.149702] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69434b52-c1dd-4c64-a193-62e8227309c0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.162367] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Created directory with path [datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029 {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1240.162615] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_acb0e49c-e87d-4a19-aeca-a7d006f542cc/OSTACK_IMG_acb0e49c-e87d-4a19-aeca-a7d006f542cc.vmdk to [datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029/22368b12-a541-49dd-98b8-5368ea101029.vmdk. 
{{(pid=65503) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1240.163012] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-de28d0db-2b50-439c-8883-47bec9c9d780 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.171627] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1240.171627] env[65503]: value = "task-4450955" [ 1240.171627] env[65503]: _type = "Task" [ 1240.171627] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.181817] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450955, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.290815] env[65503]: DEBUG nova.objects.base [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Object Instance<6d11d79b-b11f-4a31-a4e3-aa5b3346ae17> lazy-loaded attributes: resources,numa_topology {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1240.361616] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce99ef5-4c09-4712-a3a7-408b2d19c6a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.371849] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9f9dd1-560c-41f1-a43a-42ccd8f46140 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.411375] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e78bb6-bc71-497b-b56f-01ee9f298ce1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.421274] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70dc32c-18bc-46fa-a1d0-589cfc889243 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.438748] env[65503]: DEBUG nova.compute.provider_tree [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1240.554289] env[65503]: WARNING neutronclient.v2_0.client [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
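
The records above trace the standard oslo.vmware image-import flow: the HttpNfcLease becomes ready, the VMDK device URL is read from the lease info, the streamOptimized image bytes are written over HTTPS with periodic HttpNfcLeaseProgress updates, the lease is closed with HttpNfcLeaseComplete, the imported helper VM is unregistered, and the disk is moved into devstack-image-cache_base via MakeDirectory plus a MoveVirtualDisk_Task that wait_for_task polls. The sketch below is illustrative only, not the Nova code referenced in the paths above; it assumes an existing oslo_vmware.api.VMwareAPISession named `session`, an HttpNfcLease `lease` returned by ImportVApp, a datacenter moref `dc_ref`, and a hypothetical `stream_to_url()` helper standing in for the HTTPS upload that oslo_vmware.rw_handles performs.

    from oslo_vmware import vim_util

    def import_image(session, lease, dc_ref, image_chunks,
                     cache_dir, imported_vmdk, cached_vmdk):
        # Block until vCenter reports the lease as "ready"
        # (the "_poll_lease ... is ready" record above).
        session.wait_for_lease_ready(lease)

        # Read HttpNfcLeaseInfo and pick the device URL that carries the disk,
        # which is what rw_handles._find_vmdk_url does.
        info = session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, lease, 'info')
        vmdk_url = next(du.url for du in info.deviceUrl if du.disk)

        # Upload the image bytes; report progress so the lease does not time out
        # (the HttpNfcLeaseProgress invocations in the log), then close the lease.
        for pct in stream_to_url(vmdk_url, image_chunks):   # hypothetical helper
            session.invoke_api(session.vim, 'HttpNfcLeaseProgress',
                               lease, percent=pct)
        session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)

        # Move the imported disk under the image cache directory, as ds_util does.
        content = session.vim.service_content
        session.invoke_api(session.vim, 'MakeDirectory', content.fileManager,
                           name=cache_dir, datacenter=dc_ref,
                           createParentDirectories=True)
        task = session.invoke_api(session.vim, 'MoveVirtualDisk_Task',
                                  content.virtualDiskManager,
                                  sourceName=imported_vmdk, sourceDatacenter=dc_ref,
                                  destName=cached_vmdk, destDatacenter=dc_ref,
                                  force=False)
        # wait_for_task polls the task and produces the
        # "MoveVirtualDisk_Task progress is N%" records seen above.
        session.wait_for_task(task)
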
[ 1240.555080] env[65503]: WARNING openstack [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1240.555435] env[65503]: WARNING openstack [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1240.685298] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450955, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.688565] env[65503]: WARNING openstack [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1240.688955] env[65503]: WARNING openstack [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1240.758160] env[65503]: WARNING neutronclient.v2_0.client [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1240.758913] env[65503]: WARNING openstack [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1240.759380] env[65503]: WARNING openstack [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1240.846520] env[65503]: DEBUG nova.network.neutron [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updated VIF entry in instance network info cache for port f896d088-0ab2-44cc-a26c-6593c073467c. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1240.846998] env[65503]: DEBUG nova.network.neutron [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updating instance_info_cache with network_info: [{"id": "f896d088-0ab2-44cc-a26c-6593c073467c", "address": "fa:16:3e:79:ba:09", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf896d088-0a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1240.942985] env[65503]: DEBUG nova.scheduler.client.report [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1241.027682] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.183108] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450955, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.349799] env[65503]: DEBUG oslo_concurrency.lockutils [req-ee29a316-eaf2-4ff7-98ce-6b2d8e6b6a86 req-b10fd0bd-7ac0-475b-b306-d185bdb3d1d5 service nova] Releasing lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1241.420522] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.420750] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.448177] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.164s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.686263] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450955, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.924020] env[65503]: DEBUG nova.compute.manager [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1241.957596] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5ed097c0-711c-4ee3-86f9-d758e395b3c5 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.105s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.958877] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.931s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.959117] env[65503]: INFO nova.compute.manager [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Unshelving [ 1242.189095] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450955, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.448621] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.449136] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.450669] env[65503]: INFO nova.compute.claims [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1242.684425] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450955, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.980711] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.185237] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450955, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.594849} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.185449] env[65503]: INFO nova.virt.vmwareapi.ds_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_acb0e49c-e87d-4a19-aeca-a7d006f542cc/OSTACK_IMG_acb0e49c-e87d-4a19-aeca-a7d006f542cc.vmdk to [datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029/22368b12-a541-49dd-98b8-5368ea101029.vmdk. [ 1243.185634] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Cleaning up location [datastore2] OSTACK_IMG_acb0e49c-e87d-4a19-aeca-a7d006f542cc {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1243.185790] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_acb0e49c-e87d-4a19-aeca-a7d006f542cc {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1243.186091] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57ab1d2a-0987-4f68-80f8-d42f693d00ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.193107] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1243.193107] env[65503]: value = "task-4450956" [ 1243.193107] env[65503]: _type = "Task" [ 1243.193107] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.201442] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450956, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.541433] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801491a2-271f-42f4-acf1-80f075a27624 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.551796] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce61e648-51a7-4b29-a64b-306e9e3dbf15 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.582260] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbaf546f-a032-4065-bf0d-ccc573f19dfc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.590138] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405ef8a9-d6d9-462b-a9e5-2a914a59b437 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.603953] env[65503]: DEBUG nova.compute.provider_tree [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1243.704343] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450956, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035675} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.704669] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1243.704865] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029/22368b12-a541-49dd-98b8-5368ea101029.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.705134] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029/22368b12-a541-49dd-98b8-5368ea101029.vmdk to [datastore2] fb2dddac-4ac0-498a-b972-e61255833ad0/fb2dddac-4ac0-498a-b972-e61255833ad0.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1243.705402] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfc025ed-ce8e-4418-b4d2-2bbec420ad95 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.713687] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1243.713687] env[65503]: value = "task-4450957" [ 1243.713687] env[65503]: _type = "Task" [ 1243.713687] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.722827] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450957, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.108027] env[65503]: DEBUG nova.scheduler.client.report [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1244.225523] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450957, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.614097] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.165s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.614736] env[65503]: DEBUG nova.compute.manager [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1244.617613] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.637s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.617864] env[65503]: DEBUG nova.objects.instance [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'pci_requests' on Instance uuid 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1244.728908] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450957, 'name': CopyVirtualDisk_Task} progress is 46%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.122051] env[65503]: DEBUG nova.compute.utils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1245.124373] env[65503]: DEBUG nova.objects.instance [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'numa_topology' on Instance uuid 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1245.125553] env[65503]: DEBUG nova.compute.manager [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1245.125744] env[65503]: DEBUG nova.network.neutron [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1245.126109] env[65503]: WARNING neutronclient.v2_0.client [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1245.126396] env[65503]: WARNING neutronclient.v2_0.client [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
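
The recurring "Inventory has not changed for provider 988ff85a-..." records carry the full placement inventory for this compute node. Placement treats (total - reserved) * allocation_ratio as the schedulable capacity per resource class, with max_unit capping any single allocation. A short worked check against the figures reported in these records:

    # Inventory as reported for provider 988ff85a-1d12-41bb-a369-e298e8491ca1.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 96},
    }

    for rc, inv in inventory.items():
        # Usable capacity as placement computes it.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:.0f} schedulable, at most {inv['max_unit']} per allocation")

    # VCPU: 192 schedulable, at most 16 per allocation
    # MEMORY_MB: 196078 schedulable, at most 65530 per allocation
    # DISK_GB: 200 schedulable, at most 96 per allocation
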
[ 1245.127046] env[65503]: WARNING openstack [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1245.127391] env[65503]: WARNING openstack [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1245.136382] env[65503]: INFO nova.compute.claims [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1245.176354] env[65503]: DEBUG nova.policy [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9955668c2464ddfb0eae34aa700ddd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '521d40776571452e85178972f97c8622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1245.227195] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450957, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.490767] env[65503]: DEBUG nova.network.neutron [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Successfully created port: b9bb3a31-7ee2-4644-8b62-570a11847efa {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1245.627931] env[65503]: DEBUG nova.compute.manager [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1245.727862] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450957, 'name': CopyVirtualDisk_Task} progress is 88%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.224209] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1dc9901-cbc7-4c55-be54-c9b46ff4cec3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.235528] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d8a6aa-192a-4ca9-8061-44e56f8af657 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.239282] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450957, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.292826} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.239561] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/22368b12-a541-49dd-98b8-5368ea101029/22368b12-a541-49dd-98b8-5368ea101029.vmdk to [datastore2] fb2dddac-4ac0-498a-b972-e61255833ad0/fb2dddac-4ac0-498a-b972-e61255833ad0.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1246.240661] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a38357-0fc1-4cb2-9047-3047b82df39b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.270088] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de47e62-3aa2-4acf-a74f-763a61ad79ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.290707] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] fb2dddac-4ac0-498a-b972-e61255833ad0/fb2dddac-4ac0-498a-b972-e61255833ad0.vmdk or device None with type streamOptimized {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1246.291504] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c92989e7-a2e7-44c5-a613-e4bfc78b2a2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.310191] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ef2afe-3fd5-4c61-8cfd-9987ddfadebc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.315553] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1246.315553] env[65503]: value = "task-4450958" [ 1246.315553] env[65503]: _type = "Task" [ 
1246.315553] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.328133] env[65503]: DEBUG nova.compute.provider_tree [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1246.336582] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450958, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.638682] env[65503]: DEBUG nova.compute.manager [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1246.670922] env[65503]: DEBUG nova.virt.hardware [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1246.671209] env[65503]: DEBUG nova.virt.hardware [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1246.671363] env[65503]: DEBUG nova.virt.hardware [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1246.671545] env[65503]: DEBUG nova.virt.hardware [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1246.671684] env[65503]: DEBUG nova.virt.hardware [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:400}} [ 1246.671824] env[65503]: DEBUG nova.virt.hardware [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1246.672035] env[65503]: DEBUG nova.virt.hardware [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1246.672193] env[65503]: DEBUG nova.virt.hardware [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1246.672354] env[65503]: DEBUG nova.virt.hardware [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1246.672511] env[65503]: DEBUG nova.virt.hardware [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1246.672681] env[65503]: DEBUG nova.virt.hardware [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1246.673589] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d3e4ea-024e-4286-88c5-a993f572c7ad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.682214] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca535c7-f0ee-46cd-88fb-bfc1fb53c5c0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.826197] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450958, 'name': ReconfigVM_Task, 'duration_secs': 0.268936} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.826525] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Reconfigured VM instance instance-0000006e to attach disk [datastore2] fb2dddac-4ac0-498a-b972-e61255833ad0/fb2dddac-4ac0-498a-b972-e61255833ad0.vmdk or device None with type streamOptimized {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1246.827314] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a42706c0-6a44-42ea-a07e-d4e82f9112cc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.832036] env[65503]: DEBUG nova.scheduler.client.report [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1246.836318] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1246.836318] env[65503]: value = "task-4450959" [ 1246.836318] env[65503]: _type = "Task" [ 1246.836318] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.846278] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450959, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.950263] env[65503]: DEBUG nova.compute.manager [req-52412242-2228-4f5e-b85e-d8e4b6113c20 req-fac564c3-b0cc-458f-9321-f5d796cb59dd service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Received event network-vif-plugged-b9bb3a31-7ee2-4644-8b62-570a11847efa {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1246.950556] env[65503]: DEBUG oslo_concurrency.lockutils [req-52412242-2228-4f5e-b85e-d8e4b6113c20 req-fac564c3-b0cc-458f-9321-f5d796cb59dd service nova] Acquiring lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.950699] env[65503]: DEBUG oslo_concurrency.lockutils [req-52412242-2228-4f5e-b85e-d8e4b6113c20 req-fac564c3-b0cc-458f-9321-f5d796cb59dd service nova] Lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.950880] env[65503]: DEBUG oslo_concurrency.lockutils [req-52412242-2228-4f5e-b85e-d8e4b6113c20 req-fac564c3-b0cc-458f-9321-f5d796cb59dd service nova] Lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.951056] env[65503]: DEBUG nova.compute.manager [req-52412242-2228-4f5e-b85e-d8e4b6113c20 req-fac564c3-b0cc-458f-9321-f5d796cb59dd service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] No waiting events found dispatching network-vif-plugged-b9bb3a31-7ee2-4644-8b62-570a11847efa {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1246.951221] env[65503]: WARNING nova.compute.manager [req-52412242-2228-4f5e-b85e-d8e4b6113c20 req-fac564c3-b0cc-458f-9321-f5d796cb59dd service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Received unexpected event network-vif-plugged-b9bb3a31-7ee2-4644-8b62-570a11847efa for instance with vm_state building and task_state spawning. 
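
The "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" triplets throughout this section come from oslo.concurrency: Nova serializes its critical sections (resource tracker updates under "compute_resources", per-instance event dispatch, "refresh_cache-<uuid>" network-cache refreshes) behind named locks, and the synchronized wrapper logs the wait and hold times (lockutils.py:405-424 in the records above), while the plain lock() context manager logs the Acquiring/Acquired/Releasing lines (lockutils.py:313-334). A minimal sketch using oslo.concurrency directly, assuming the decorator and context-manager forms shown below; Nova itself reaches these through its own utility wrappers.

    from oslo_concurrency import lockutils

    # Decorator form: every call sharing the lock name is serialized, the way
    # the resource tracker serializes work under "compute_resources".
    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid):
        # claim or return resources for the instance while holding the lock
        pass

    # Context-manager form: the per-instance cache refresh pattern,
    # e.g. "refresh_cache-<instance uuid>" in the log.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # rebuild the instance network info cache under the lock
            pass

Because the wrapper reports both waited and held durations, long holds on a shared name such as "compute_resources" show up directly as queued claims for other requests, e.g. the 1.637s and 0.931s waits recorded earlier in this section.
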
[ 1247.056451] env[65503]: DEBUG nova.network.neutron [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Successfully updated port: b9bb3a31-7ee2-4644-8b62-570a11847efa {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1247.337587] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.720s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.337983] env[65503]: WARNING neutronclient.v2_0.client [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1247.350043] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450959, 'name': Rename_Task, 'duration_secs': 0.135676} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.350312] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1247.350562] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2878bf7d-39c4-4e56-a2d1-b6ef938ff6dd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.356901] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1247.356901] env[65503]: value = "task-4450960" [ 1247.356901] env[65503]: _type = "Task" [ 1247.356901] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.366424] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450960, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.370155] env[65503]: INFO nova.network.neutron [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updating port f896d088-0ab2-44cc-a26c-6593c073467c with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1247.559782] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "refresh_cache-a1247f14-ebd5-4097-9532-91ddbc9ff8af" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.559991] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "refresh_cache-a1247f14-ebd5-4097-9532-91ddbc9ff8af" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.560188] env[65503]: DEBUG nova.network.neutron [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1247.868391] env[65503]: DEBUG oslo_vmware.api [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450960, 'name': PowerOnVM_Task, 'duration_secs': 0.44655} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.868708] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1247.967853] env[65503]: DEBUG nova.compute.manager [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1247.968971] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1782ab0e-cc04-4d80-b7fd-5f39386b660a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.063110] env[65503]: WARNING openstack [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1248.063516] env[65503]: WARNING openstack [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1248.099126] env[65503]: DEBUG nova.network.neutron [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1248.116913] env[65503]: WARNING openstack [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1248.117334] env[65503]: WARNING openstack [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1248.173323] env[65503]: WARNING neutronclient.v2_0.client [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1248.173972] env[65503]: WARNING openstack [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1248.174330] env[65503]: WARNING openstack [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1248.258036] env[65503]: DEBUG nova.network.neutron [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Updating instance_info_cache with network_info: [{"id": "b9bb3a31-7ee2-4644-8b62-570a11847efa", "address": "fa:16:3e:33:97:8f", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapb9bb3a31-7e", "ovs_interfaceid": "b9bb3a31-7ee2-4644-8b62-570a11847efa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1248.487628] env[65503]: DEBUG oslo_concurrency.lockutils [None req-519d86fc-4108-41a5-a95b-386b737303d6 tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.230s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.760978] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "refresh_cache-a1247f14-ebd5-4097-9532-91ddbc9ff8af" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.761450] env[65503]: DEBUG nova.compute.manager [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Instance network_info: |[{"id": "b9bb3a31-7ee2-4644-8b62-570a11847efa", "address": "fa:16:3e:33:97:8f", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9bb3a31-7e", "ovs_interfaceid": "b9bb3a31-7ee2-4644-8b62-570a11847efa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1248.762109] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:97:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a7ba8d0-0208-4af7-af44-2a5ad382f9be', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9bb3a31-7ee2-4644-8b62-570a11847efa', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1248.769963] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 
tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1248.770217] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1248.770456] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7451adbc-66af-4f40-852e-c556d72d5a8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.791583] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1248.791583] env[65503]: value = "task-4450961" [ 1248.791583] env[65503]: _type = "Task" [ 1248.791583] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.800476] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450961, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.802771] env[65503]: DEBUG nova.compute.manager [req-81e8ebe7-d550-4dcc-aca2-da15bc4228c2 req-09f45d53-e21b-4b6d-b4df-f85b913454fe service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Received event network-vif-plugged-f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1248.803014] env[65503]: DEBUG oslo_concurrency.lockutils [req-81e8ebe7-d550-4dcc-aca2-da15bc4228c2 req-09f45d53-e21b-4b6d-b4df-f85b913454fe service nova] Acquiring lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.803228] env[65503]: DEBUG oslo_concurrency.lockutils [req-81e8ebe7-d550-4dcc-aca2-da15bc4228c2 req-09f45d53-e21b-4b6d-b4df-f85b913454fe service nova] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.803400] env[65503]: DEBUG oslo_concurrency.lockutils [req-81e8ebe7-d550-4dcc-aca2-da15bc4228c2 req-09f45d53-e21b-4b6d-b4df-f85b913454fe service nova] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.803560] env[65503]: DEBUG nova.compute.manager [req-81e8ebe7-d550-4dcc-aca2-da15bc4228c2 req-09f45d53-e21b-4b6d-b4df-f85b913454fe service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] No waiting events found dispatching network-vif-plugged-f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1248.803726] env[65503]: WARNING nova.compute.manager [req-81e8ebe7-d550-4dcc-aca2-da15bc4228c2 req-09f45d53-e21b-4b6d-b4df-f85b913454fe service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Received unexpected event network-vif-plugged-f896d088-0ab2-44cc-a26c-6593c073467c for 
instance with vm_state shelved_offloaded and task_state spawning. [ 1248.895885] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.897751] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.897751] env[65503]: DEBUG nova.network.neutron [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1248.988509] env[65503]: DEBUG nova.compute.manager [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Received event network-changed-b9bb3a31-7ee2-4644-8b62-570a11847efa {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1248.988784] env[65503]: DEBUG nova.compute.manager [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Refreshing instance network info cache due to event network-changed-b9bb3a31-7ee2-4644-8b62-570a11847efa. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1248.989140] env[65503]: DEBUG oslo_concurrency.lockutils [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] Acquiring lock "refresh_cache-a1247f14-ebd5-4097-9532-91ddbc9ff8af" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.989331] env[65503]: DEBUG oslo_concurrency.lockutils [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] Acquired lock "refresh_cache-a1247f14-ebd5-4097-9532-91ddbc9ff8af" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.989569] env[65503]: DEBUG nova.network.neutron [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Refreshing network info cache for port b9bb3a31-7ee2-4644-8b62-570a11847efa {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1249.302369] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450961, 'name': CreateVM_Task, 'duration_secs': 0.29656} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.302546] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1249.303058] env[65503]: WARNING neutronclient.v2_0.client [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1249.303448] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.303600] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1249.303926] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1249.304203] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-910b1834-acd9-46d7-ab03-a4593f72abaf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.308944] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1249.308944] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]528082dc-d495-7095-9b66-4dccc461c0cc" [ 1249.308944] env[65503]: _type = "Task" [ 1249.308944] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.316950] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528082dc-d495-7095-9b66-4dccc461c0cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.399869] env[65503]: WARNING neutronclient.v2_0.client [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1249.400767] env[65503]: WARNING openstack [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1249.401140] env[65503]: WARNING openstack [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1249.492204] env[65503]: WARNING neutronclient.v2_0.client [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1249.492906] env[65503]: WARNING openstack [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1249.493288] env[65503]: WARNING openstack [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1249.535273] env[65503]: WARNING openstack [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1249.535753] env[65503]: WARNING openstack [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1249.592411] env[65503]: WARNING openstack [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1249.592863] env[65503]: WARNING openstack [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa 
service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1249.620657] env[65503]: WARNING neutronclient.v2_0.client [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1249.621325] env[65503]: WARNING openstack [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1249.621671] env[65503]: WARNING openstack [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1249.663758] env[65503]: WARNING neutronclient.v2_0.client [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1249.664424] env[65503]: WARNING openstack [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1249.664775] env[65503]: WARNING openstack [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1249.716205] env[65503]: DEBUG nova.network.neutron [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updating instance_info_cache with network_info: [{"id": "f896d088-0ab2-44cc-a26c-6593c073467c", "address": "fa:16:3e:79:ba:09", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf896d088-0a", "ovs_interfaceid": "f896d088-0ab2-44cc-a26c-6593c073467c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1249.754327] env[65503]: DEBUG nova.network.neutron [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Updated VIF entry in instance network info cache for port b9bb3a31-7ee2-4644-8b62-570a11847efa. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1249.754920] env[65503]: DEBUG nova.network.neutron [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Updating instance_info_cache with network_info: [{"id": "b9bb3a31-7ee2-4644-8b62-570a11847efa", "address": "fa:16:3e:33:97:8f", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9bb3a31-7e", "ovs_interfaceid": "b9bb3a31-7ee2-4644-8b62-570a11847efa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1249.821227] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]528082dc-d495-7095-9b66-4dccc461c0cc, 'name': SearchDatastore_Task, 'duration_secs': 0.010229} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.821520] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.821753] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1249.821995] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.822158] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1249.822338] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1249.822620] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ddac72b-a858-4876-94de-350f39404e56 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.832780] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1249.832946] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1249.833708] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1cc4724-b0d3-4728-944d-b1ae1cd1b9b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.840201] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1249.840201] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524b0872-0719-3079-8305-693d35d5c106" [ 1249.840201] env[65503]: _type = "Task" [ 1249.840201] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.848616] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524b0872-0719-3079-8305-693d35d5c106, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.218904] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.249690] env[65503]: DEBUG nova.virt.hardware [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='593644a990a9208ea65b96c0b6b1887d',container_format='bare',created_at=2025-11-14T15:58:10Z,direct_url=,disk_format='vmdk',id=07b5b971-b74b-4847-966b-a592b53a880d,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-715863477-shelved',owner='3658921b747e4d78a2046b838cb36d26',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-11-14T15:58:23Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1250.249945] env[65503]: DEBUG nova.virt.hardware [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1250.250123] env[65503]: DEBUG nova.virt.hardware [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1250.250313] env[65503]: DEBUG nova.virt.hardware [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 
tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1250.250454] env[65503]: DEBUG nova.virt.hardware [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1250.250598] env[65503]: DEBUG nova.virt.hardware [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1250.250802] env[65503]: DEBUG nova.virt.hardware [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1250.250956] env[65503]: DEBUG nova.virt.hardware [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1250.251135] env[65503]: DEBUG nova.virt.hardware [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1250.251293] env[65503]: DEBUG nova.virt.hardware [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1250.251459] env[65503]: DEBUG nova.virt.hardware [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1250.252396] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4244b394-8064-4945-847f-67bbf980f774 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.258016] env[65503]: DEBUG oslo_concurrency.lockutils [req-10408ee0-a103-473b-a9c6-a3a2e6fcf998 req-4a361e0f-50d0-4399-ae5e-d63a97df9cfa service nova] Releasing lock "refresh_cache-a1247f14-ebd5-4097-9532-91ddbc9ff8af" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.261701] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9bf898-b7b6-4799-af07-dfac904c1611 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.276731] env[65503]: 
DEBUG nova.virt.vmwareapi.vmops [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:ba:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6966f473-59ac-49bb-9b7a-22c61f4e61e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f896d088-0ab2-44cc-a26c-6593c073467c', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1250.284833] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1250.285541] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1250.285820] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e56d7a9-86d8-4fde-897e-8ad111f4954d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.307291] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1250.307291] env[65503]: value = "task-4450962" [ 1250.307291] env[65503]: _type = "Task" [ 1250.307291] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.315761] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450962, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.323414] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.351019] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524b0872-0719-3079-8305-693d35d5c106, 'name': SearchDatastore_Task, 'duration_secs': 0.010255} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.352085] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d95d45a-776f-423e-b49c-0bd4e9010187 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.359123] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1250.359123] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d08214-1f6b-099c-7300-c57b6ef1079c" [ 1250.359123] env[65503]: _type = "Task" [ 1250.359123] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.369018] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d08214-1f6b-099c-7300-c57b6ef1079c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.818356] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450962, 'name': CreateVM_Task, 'duration_secs': 0.284309} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.818544] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1250.819047] env[65503]: WARNING neutronclient.v2_0.client [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1250.819409] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.819562] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.819943] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1250.820209] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b649eb3-2168-4b06-8144-6fdd4b47f271 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.825367] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1250.825367] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b89501-9403-0e7b-3b85-c7dccb42dea7" [ 1250.825367] env[65503]: _type = "Task" [ 1250.825367] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.834168] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b89501-9403-0e7b-3b85-c7dccb42dea7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.836649] env[65503]: DEBUG nova.compute.manager [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Received event network-changed-f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1250.836933] env[65503]: DEBUG nova.compute.manager [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Refreshing instance network info cache due to event network-changed-f896d088-0ab2-44cc-a26c-6593c073467c. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1250.837198] env[65503]: DEBUG oslo_concurrency.lockutils [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] Acquiring lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.837411] env[65503]: DEBUG oslo_concurrency.lockutils [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] Acquired lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.837607] env[65503]: DEBUG nova.network.neutron [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Refreshing network info cache for port f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1250.870212] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52d08214-1f6b-099c-7300-c57b6ef1079c, 'name': SearchDatastore_Task, 'duration_secs': 0.010877} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.870466] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.870852] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] a1247f14-ebd5-4097-9532-91ddbc9ff8af/a1247f14-ebd5-4097-9532-91ddbc9ff8af.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1250.871047] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4ae6d50-514c-40c0-9eb6-c359d43912b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.878887] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1250.878887] env[65503]: value = "task-4450963" [ 1250.878887] env[65503]: _type = "Task" [ 1250.878887] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.889588] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450963, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.337077] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.337486] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Processing image 07b5b971-b74b-4847-966b-a592b53a880d {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1251.337566] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d/07b5b971-b74b-4847-966b-a592b53a880d.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.337705] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d/07b5b971-b74b-4847-966b-a592b53a880d.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.337890] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1251.338258] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-771593de-1a68-4238-bce2-7ff1db02c01d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.340767] env[65503]: WARNING neutronclient.v2_0.client [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1251.341618] env[65503]: WARNING openstack [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1251.341972] env[65503]: WARNING openstack [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1251.359328] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1251.359565] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1251.360384] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36f9c6cd-590f-4c81-853b-3a939d9f71f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.365987] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1251.365987] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527267f3-8a02-7494-d393-0fda2c9760c0" [ 1251.365987] env[65503]: _type = "Task" [ 1251.365987] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.375443] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527267f3-8a02-7494-d393-0fda2c9760c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.388982] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470593} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.389140] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] a1247f14-ebd5-4097-9532-91ddbc9ff8af/a1247f14-ebd5-4097-9532-91ddbc9ff8af.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1251.389361] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1251.389608] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f99923a-4755-4dcf-8c15-3c3abc98f7d0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.396544] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1251.396544] env[65503]: value = "task-4450964" [ 1251.396544] env[65503]: _type = "Task" [ 1251.396544] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.405645] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450964, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.450192] env[65503]: WARNING openstack [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1251.450592] env[65503]: WARNING openstack [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1251.509907] env[65503]: WARNING neutronclient.v2_0.client [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1251.510597] env[65503]: WARNING openstack [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1251.510972] env[65503]: WARNING openstack [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1251.592758] env[65503]: DEBUG nova.network.neutron [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updated VIF entry in instance network info cache for port f896d088-0ab2-44cc-a26c-6593c073467c. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1251.593164] env[65503]: DEBUG nova.network.neutron [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updating instance_info_cache with network_info: [{"id": "f896d088-0ab2-44cc-a26c-6593c073467c", "address": "fa:16:3e:79:ba:09", "network": {"id": "e4a8e4c0-fd5c-4548-9323-64b2daf9ed62", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1818936699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3658921b747e4d78a2046b838cb36d26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf896d088-0a", "ovs_interfaceid": "f896d088-0ab2-44cc-a26c-6593c073467c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1251.876594] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Preparing fetch location {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1251.876940] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Fetch image to [datastore2] 
OSTACK_IMG_47fdfc0c-fcb2-4589-a464-00902c761db4/OSTACK_IMG_47fdfc0c-fcb2-4589-a464-00902c761db4.vmdk {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1251.877199] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Downloading stream optimized image 07b5b971-b74b-4847-966b-a592b53a880d to [datastore2] OSTACK_IMG_47fdfc0c-fcb2-4589-a464-00902c761db4/OSTACK_IMG_47fdfc0c-fcb2-4589-a464-00902c761db4.vmdk on the data store datastore2 as vApp {{(pid=65503) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1251.877403] env[65503]: DEBUG nova.virt.vmwareapi.images [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Downloading image file data 07b5b971-b74b-4847-966b-a592b53a880d to the ESX as VM named 'OSTACK_IMG_47fdfc0c-fcb2-4589-a464-00902c761db4' {{(pid=65503) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1251.923461] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.299383} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.923662] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1251.924458] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6bb71e-1bb4-43ef-a25d-4bf8d452ed7f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.946713] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] a1247f14-ebd5-4097-9532-91ddbc9ff8af/a1247f14-ebd5-4097-9532-91ddbc9ff8af.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1251.948966] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d82d8561-6a61-4f29-929b-c37880ff810f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.969637] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1251.969637] env[65503]: value = "task-4450965" [ 1251.969637] env[65503]: _type = "Task" [ 1251.969637] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.978393] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450965, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.983598] env[65503]: DEBUG oslo_vmware.rw_handles [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1251.983598] env[65503]: value = "resgroup-9" [ 1251.983598] env[65503]: _type = "ResourcePool" [ 1251.983598] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1251.983858] env[65503]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-8151895c-9cec-4354-b59c-df18f96f6744 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.004817] env[65503]: DEBUG oslo_vmware.rw_handles [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lease: (returnval){ [ 1252.004817] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526420ff-56de-17d8-5238-6ff7c8b062c3" [ 1252.004817] env[65503]: _type = "HttpNfcLease" [ 1252.004817] env[65503]: } obtained for vApp import into resource pool (val){ [ 1252.004817] env[65503]: value = "resgroup-9" [ 1252.004817] env[65503]: _type = "ResourcePool" [ 1252.004817] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1252.005198] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the lease: (returnval){ [ 1252.005198] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526420ff-56de-17d8-5238-6ff7c8b062c3" [ 1252.005198] env[65503]: _type = "HttpNfcLease" [ 1252.005198] env[65503]: } to be ready. {{(pid=65503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1252.012313] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1252.012313] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526420ff-56de-17d8-5238-6ff7c8b062c3" [ 1252.012313] env[65503]: _type = "HttpNfcLease" [ 1252.012313] env[65503]: } is initializing. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1252.096247] env[65503]: DEBUG oslo_concurrency.lockutils [req-4482eeea-aab0-4d0e-afe3-02d9d719b620 req-3d2f5b69-ce40-4425-b90b-bd40bf9e4ee5 service nova] Releasing lock "refresh_cache-6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.483204] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450965, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.513051] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1252.513051] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526420ff-56de-17d8-5238-6ff7c8b062c3" [ 1252.513051] env[65503]: _type = "HttpNfcLease" [ 1252.513051] env[65503]: } is initializing. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1252.981683] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450965, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.013670] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1253.013670] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526420ff-56de-17d8-5238-6ff7c8b062c3" [ 1253.013670] env[65503]: _type = "HttpNfcLease" [ 1253.013670] env[65503]: } is initializing. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1253.324068] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.324276] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.482341] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450965, 'name': ReconfigVM_Task, 'duration_secs': 1.174989} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.482636] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Reconfigured VM instance instance-00000071 to attach disk [datastore2] a1247f14-ebd5-4097-9532-91ddbc9ff8af/a1247f14-ebd5-4097-9532-91ddbc9ff8af.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1253.483300] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0f1c0fa-3816-4cd4-8bd1-597319368ea1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.490733] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1253.490733] env[65503]: value = "task-4450967" [ 1253.490733] env[65503]: _type = "Task" [ 1253.490733] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.499361] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450967, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.514656] env[65503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1253.514656] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526420ff-56de-17d8-5238-6ff7c8b062c3" [ 1253.514656] env[65503]: _type = "HttpNfcLease" [ 1253.514656] env[65503]: } is ready. {{(pid=65503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1253.514980] env[65503]: DEBUG oslo_vmware.rw_handles [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1253.514980] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526420ff-56de-17d8-5238-6ff7c8b062c3" [ 1253.514980] env[65503]: _type = "HttpNfcLease" [ 1253.514980] env[65503]: }. {{(pid=65503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1253.515845] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8bd809-302f-442c-bbaf-af33ac3eef4e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.524816] env[65503]: DEBUG oslo_vmware.rw_handles [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c6391a-fa3a-aefd-b021-923e2efb8a3c/disk-0.vmdk from lease info. {{(pid=65503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1253.525050] env[65503]: DEBUG oslo_vmware.rw_handles [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c6391a-fa3a-aefd-b021-923e2efb8a3c/disk-0.vmdk. {{(pid=65503) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1253.592277] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-01a8fc5f-1cf0-472a-83f2-d14529feeb5a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.001673] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450967, 'name': Rename_Task, 'duration_secs': 0.131463} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.001992] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1254.002304] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0e237ec-8035-4667-83da-8c4a1619a3a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.010476] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1254.010476] env[65503]: value = "task-4450968" [ 1254.010476] env[65503]: _type = "Task" [ 1254.010476] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.019473] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450968, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.324402] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1254.324604] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1254.521126] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450968, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.951860] env[65503]: DEBUG oslo_vmware.rw_handles [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Completed reading data from the image iterator. {{(pid=65503) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1254.952272] env[65503]: DEBUG oslo_vmware.rw_handles [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c6391a-fa3a-aefd-b021-923e2efb8a3c/disk-0.vmdk. 
{{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1254.953178] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6826b924-1179-4b31-9f20-2b2892a19f38 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.962877] env[65503]: DEBUG oslo_vmware.rw_handles [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c6391a-fa3a-aefd-b021-923e2efb8a3c/disk-0.vmdk is in state: ready. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1254.963224] env[65503]: DEBUG oslo_vmware.rw_handles [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c6391a-fa3a-aefd-b021-923e2efb8a3c/disk-0.vmdk. {{(pid=65503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1254.963598] env[65503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-39a7800f-2c79-44f7-bc9d-913884a417ab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.021951] env[65503]: DEBUG oslo_vmware.api [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450968, 'name': PowerOnVM_Task, 'duration_secs': 0.570622} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.022417] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1255.022526] env[65503]: INFO nova.compute.manager [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Took 8.38 seconds to spawn the instance on the hypervisor. [ 1255.022705] env[65503]: DEBUG nova.compute.manager [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1255.023704] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f374f8fe-4ee3-4874-8d0e-8871da5b3f9b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.406015] env[65503]: DEBUG oslo_vmware.rw_handles [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c6391a-fa3a-aefd-b021-923e2efb8a3c/disk-0.vmdk. 
{{(pid=65503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1255.406313] env[65503]: INFO nova.virt.vmwareapi.images [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Downloaded image file data 07b5b971-b74b-4847-966b-a592b53a880d [ 1255.407252] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4569e09-f1f9-49f5-b134-d7ccd0a1701c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.424156] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0826f49-9d63-47af-a98d-808b3b3f9601 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.462416] env[65503]: INFO nova.virt.vmwareapi.images [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] The imported VM was unregistered [ 1255.465136] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Caching image {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1255.465371] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Creating directory with path [datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1255.465658] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11bff07f-b52a-46c1-8b15-a772123515fe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.479264] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Created directory with path [datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1255.479483] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_47fdfc0c-fcb2-4589-a464-00902c761db4/OSTACK_IMG_47fdfc0c-fcb2-4589-a464-00902c761db4.vmdk to [datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d/07b5b971-b74b-4847-966b-a592b53a880d.vmdk. 
{{(pid=65503) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1255.479740] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-abe50bc3-a861-4cdf-acb4-b27ff22574dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.487298] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1255.487298] env[65503]: value = "task-4450970" [ 1255.487298] env[65503]: _type = "Task" [ 1255.487298] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.497329] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450970, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.544413] env[65503]: INFO nova.compute.manager [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Took 13.11 seconds to build instance. [ 1256.001416] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450970, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.047306] env[65503]: DEBUG oslo_concurrency.lockutils [None req-55b03ee1-79ed-4235-bf90-9d2a0d8adf49 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.626s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.325069] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.501193] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450970, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.695716] env[65503]: DEBUG nova.compute.manager [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Received event network-changed-b9bb3a31-7ee2-4644-8b62-570a11847efa {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1256.695984] env[65503]: DEBUG nova.compute.manager [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Refreshing instance network info cache due to event network-changed-b9bb3a31-7ee2-4644-8b62-570a11847efa. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1256.696180] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] Acquiring lock "refresh_cache-a1247f14-ebd5-4097-9532-91ddbc9ff8af" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.696472] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] Acquired lock "refresh_cache-a1247f14-ebd5-4097-9532-91ddbc9ff8af" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1256.696472] env[65503]: DEBUG nova.network.neutron [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Refreshing network info cache for port b9bb3a31-7ee2-4644-8b62-570a11847efa {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1257.001307] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450970, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.199807] env[65503]: WARNING neutronclient.v2_0.client [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1257.200346] env[65503]: WARNING openstack [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1257.200742] env[65503]: WARNING openstack [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1257.320751] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.323775] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.341877] env[65503]: WARNING openstack [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1257.342041] env[65503]: WARNING openstack [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1257.433210] env[65503]: WARNING neutronclient.v2_0.client [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1257.434143] env[65503]: WARNING openstack [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1257.434590] env[65503]: WARNING openstack [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1257.508453] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450970, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.539404] env[65503]: DEBUG nova.network.neutron [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Updated VIF entry in instance network info cache for port b9bb3a31-7ee2-4644-8b62-570a11847efa. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1257.539937] env[65503]: DEBUG nova.network.neutron [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Updating instance_info_cache with network_info: [{"id": "b9bb3a31-7ee2-4644-8b62-570a11847efa", "address": "fa:16:3e:33:97:8f", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9bb3a31-7e", "ovs_interfaceid": "b9bb3a31-7ee2-4644-8b62-570a11847efa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1257.827962] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.828382] env[65503]: DEBUG oslo_concurrency.lockutils [None 
req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.828620] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.828803] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1257.829818] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29db042-b78f-44ba-b508-6026479b0087 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.841070] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09850a32-678a-41f9-9c17-ebf263ba980f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.859502] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421284e7-9504-495b-bf23-dc407d3efdd8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.870566] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e8b1f8-8c7b-4a4e-adde-2903398908ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.908782] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179679MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1257.908992] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.909209] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.001542] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450970, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.506897} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.001542] env[65503]: INFO nova.virt.vmwareapi.ds_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_47fdfc0c-fcb2-4589-a464-00902c761db4/OSTACK_IMG_47fdfc0c-fcb2-4589-a464-00902c761db4.vmdk to [datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d/07b5b971-b74b-4847-966b-a592b53a880d.vmdk. [ 1258.001773] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Cleaning up location [datastore2] OSTACK_IMG_47fdfc0c-fcb2-4589-a464-00902c761db4 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1258.001898] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_47fdfc0c-fcb2-4589-a464-00902c761db4 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1258.002193] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c261e323-7a7f-44cf-a4d5-0df6bda08c80 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.010931] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1258.010931] env[65503]: value = "task-4450972" [ 1258.010931] env[65503]: _type = "Task" [ 1258.010931] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.021374] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450972, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.043814] env[65503]: DEBUG oslo_concurrency.lockutils [req-2f86f452-798c-4401-96df-062607d5c738 req-0a270907-2bf7-476d-9a23-4625af35e047 service nova] Releasing lock "refresh_cache-a1247f14-ebd5-4097-9532-91ddbc9ff8af" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1258.522080] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450972, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.040828} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.522080] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1258.522080] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d/07b5b971-b74b-4847-966b-a592b53a880d.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1258.522080] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d/07b5b971-b74b-4847-966b-a592b53a880d.vmdk to [datastore2] 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17/6d11d79b-b11f-4a31-a4e3-aa5b3346ae17.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1258.522080] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8118945-cb80-4b9b-9fd8-4670e1c7ad32 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.529885] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1258.529885] env[65503]: value = "task-4450973" [ 1258.529885] env[65503]: _type = "Task" [ 1258.529885] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.538571] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450973, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.939989] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance d3ca90c9-3dfa-47a5-b48b-67a45ea26021 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1258.940308] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1258.940308] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance fb2dddac-4ac0-498a-b972-e61255833ad0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1258.940396] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1258.940511] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance a1247f14-ebd5-4097-9532-91ddbc9ff8af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1258.940699] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1258.940846] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=100GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '5', 'num_vm_active': '4', 'num_task_None': '4', 'num_os_type_None': '5', 'num_proj_521d40776571452e85178972f97c8622': '2', 'io_workload': '0', 'num_proj_93906c603f7a4b18a34fc4b42fb6d6c1': '1', 'num_proj_592efb180976432cbcecb9ad421e1bd1': '1', 'num_vm_shelved_offloaded': '1', 'num_task_spawning': '1', 'num_proj_3658921b747e4d78a2046b838cb36d26': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1259.027027] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5bf79c-de25-4608-97b1-c28ceb446cb4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.040842] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82494938-7619-4857-8f22-b2624dcc11e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.044067] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450973, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.074888] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481c364e-5bf8-496e-9206-48cd94ffa630 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.083888] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cecbbb9-53e6-4c70-b4ca-1179137979f9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.098689] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1259.540745] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450973, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.602054] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1260.041385] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450973, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.107639] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1260.107930] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.199s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.544762] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450973, 'name': CopyVirtualDisk_Task} progress is 88%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.043780] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450973, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.273448} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.044203] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/07b5b971-b74b-4847-966b-a592b53a880d/07b5b971-b74b-4847-966b-a592b53a880d.vmdk to [datastore2] 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17/6d11d79b-b11f-4a31-a4e3-aa5b3346ae17.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1261.044878] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035ebbfe-11b4-497f-a3cc-c68393f2465a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.067624] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17/6d11d79b-b11f-4a31-a4e3-aa5b3346ae17.vmdk or device None with type streamOptimized {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1261.067991] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbeb304d-3b73-44f7-ad53-61ddda92fce2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.090129] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1261.090129] env[65503]: value = "task-4450975" [ 1261.090129] env[65503]: _type = "Task" [ 1261.090129] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.098585] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450975, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.108227] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.600738] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450975, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.613602] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1262.103015] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450975, 'name': ReconfigVM_Task, 'duration_secs': 0.990476} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.103393] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17/6d11d79b-b11f-4a31-a4e3-aa5b3346ae17.vmdk or device None with type streamOptimized {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1262.103987] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81257a23-6632-4742-980c-da91f9240929 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.111560] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1262.111560] env[65503]: value = "task-4450976" [ 1262.111560] env[65503]: _type = "Task" [ 1262.111560] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.120811] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450976, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.622081] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450976, 'name': Rename_Task, 'duration_secs': 0.231646} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.622377] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1262.622631] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4219330-4ae6-4aa7-92fa-789134f09230 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.630146] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1262.630146] env[65503]: value = "task-4450977" [ 1262.630146] env[65503]: _type = "Task" [ 1262.630146] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.638505] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450977, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.141254] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450977, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.642278] env[65503]: DEBUG oslo_vmware.api [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450977, 'name': PowerOnVM_Task, 'duration_secs': 0.582214} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.642551] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1263.746010] env[65503]: DEBUG nova.compute.manager [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1263.747054] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4548fcef-a6a5-4b4c-ba31-c927bbf10736 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.265161] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fbb2b3a0-8fb6-4c2a-85f4-d952f9fca840 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 22.306s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.434739] env[65503]: DEBUG oslo_concurrency.lockutils [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.435154] env[65503]: DEBUG oslo_concurrency.lockutils [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.435340] env[65503]: DEBUG oslo_concurrency.lockutils [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.435547] env[65503]: DEBUG oslo_concurrency.lockutils [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.435725] env[65503]: DEBUG oslo_concurrency.lockutils [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 
tempest-ServerActionsTestOtherB-577581098-project-member] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.437947] env[65503]: INFO nova.compute.manager [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Terminating instance [ 1265.942635] env[65503]: DEBUG nova.compute.manager [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1265.942875] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1265.943935] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f4d3ed-f257-446b-aab8-68f1d7d36eab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.953094] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1265.953373] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab75b5ab-3f0d-4a5c-b696-2e002c130626 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.960825] env[65503]: DEBUG oslo_vmware.api [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1265.960825] env[65503]: value = "task-4450980" [ 1265.960825] env[65503]: _type = "Task" [ 1265.960825] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.971676] env[65503]: DEBUG oslo_vmware.api [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450980, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.160394] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "5babb2a4-c9a1-412c-8fd2-91880037d119" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.160617] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "5babb2a4-c9a1-412c-8fd2-91880037d119" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.471081] env[65503]: DEBUG oslo_vmware.api [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450980, 'name': PowerOffVM_Task, 'duration_secs': 0.204588} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.471451] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1266.471451] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1266.471714] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33ca13e1-f39a-4e49-b726-0e3803e5827c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.534960] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1266.535259] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1266.535447] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleting the datastore file [datastore2] 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1266.535717] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5246f473-c5d6-469e-b119-1ea06f0f68ab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.543451] env[65503]: DEBUG oslo_vmware.api [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for the task: (returnval){ [ 1266.543451] env[65503]: value = "task-4450982" [ 1266.543451] env[65503]: _type = "Task" [ 1266.543451] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.551972] env[65503]: DEBUG oslo_vmware.api [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450982, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.663482] env[65503]: DEBUG nova.compute.manager [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1267.054083] env[65503]: DEBUG oslo_vmware.api [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Task: {'id': task-4450982, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152263} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.054360] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1267.054539] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1267.054708] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1267.054901] env[65503]: INFO nova.compute.manager [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Took 1.11 seconds to destroy the instance on the hypervisor. 
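The entries above show the vmwareapi driver driving vCenter operations (CopyVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task) through oslo.vmware's wait_for_task, which repeatedly polls the task and logs "progress is N%" until it reports "completed successfully". As a rough, stdlib-only sketch of that poll-until-done pattern (the get_task_info callable and TaskFailed name are hypothetical stand-ins, not oslo.vmware's actual API):

    import time

    class TaskFailed(Exception):
        """Raised when the remote task ends in an error state (hypothetical)."""

    def wait_for_task(get_task_info, interval=0.5, log=print):
        """Poll a remote task until it finishes, logging progress along the way.

        `get_task_info` is a caller-supplied function returning a dict such as
        {'state': 'running', 'progress': 43} or {'state': 'success'} -- a
        stand-in for the vCenter TaskInfo object that the real code polls.
        """
        start = time.monotonic()
        while True:
            info = get_task_info()
            state = info.get("state")
            if state == "success":
                log("completed successfully in %.3fs" % (time.monotonic() - start))
                return info
            if state == "error":
                raise TaskFailed(info.get("error", "unknown error"))
            log("progress is %s%%" % info.get("progress", 0))
            time.sleep(interval)

    # Example: a fake task that finishes on the third poll.
    states = iter([{"state": "running", "progress": 21},
                   {"state": "running", "progress": 88},
                   {"state": "success"}])
    wait_for_task(lambda: next(states), interval=0)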
[ 1267.055172] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1267.055376] env[65503]: DEBUG nova.compute.manager [-] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1267.055470] env[65503]: DEBUG nova.network.neutron [-] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1267.055710] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1267.056245] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1267.056645] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1267.095039] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
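After the instance is destroyed, the log above shows the manager waiting on _deallocate_network_with_retries before deallocate_for_instance() runs. The real code wraps the Neutron call in oslo.service looping-call machinery; as a loose, stdlib-only sketch of the bounded-retry idea only (names here are illustrative, not Nova's):

    import time

    def call_with_retries(func, attempts=3, delay=1.0, retry_on=(Exception,), log=print):
        """Invoke `func` until it succeeds or `attempts` are exhausted.

        A rough stand-in for the retry wrapper the log shows Nova waiting on;
        the actual implementation differs in structure and backoff.
        """
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except retry_on as exc:
                if attempt == attempts:
                    raise
                log("attempt %d/%d failed (%s); retrying in %.1fs"
                    % (attempt, attempts, exc, delay))
                time.sleep(delay)

    # Usage (hypothetical): deallocate_network may raise a transient Neutron
    # error before succeeding.
    # call_with_retries(lambda: deallocate_network(instance_uuid), attempts=3)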
[ 1267.188806] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.189140] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.190861] env[65503]: INFO nova.compute.claims [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1267.596653] env[65503]: DEBUG nova.compute.manager [req-64443c8c-1887-4a6b-971e-f6d509351d46 req-e2ed76f3-a7cd-4df1-aa57-ab04e3a1ba77 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Received event network-vif-deleted-f896d088-0ab2-44cc-a26c-6593c073467c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1267.597050] env[65503]: INFO nova.compute.manager [req-64443c8c-1887-4a6b-971e-f6d509351d46 req-e2ed76f3-a7cd-4df1-aa57-ab04e3a1ba77 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Neutron deleted interface f896d088-0ab2-44cc-a26c-6593c073467c; detaching it from the instance and deleting it from the info cache [ 1267.597050] env[65503]: DEBUG nova.network.neutron [req-64443c8c-1887-4a6b-971e-f6d509351d46 req-e2ed76f3-a7cd-4df1-aa57-ab04e3a1ba77 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1268.068738] env[65503]: DEBUG nova.network.neutron [-] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1268.099653] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e9dad72-9cf9-4f83-bba2-6449449a7907 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.110358] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5d7bdf-c3ab-44ac-bd08-5886834d2aa3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.144173] env[65503]: DEBUG nova.compute.manager [req-64443c8c-1887-4a6b-971e-f6d509351d46 req-e2ed76f3-a7cd-4df1-aa57-ab04e3a1ba77 service nova] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Detach interface failed, port_id=f896d088-0ab2-44cc-a26c-6593c073467c, reason: Instance 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 could not be found. 
{{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1268.292669] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3b6a86-0110-4b7f-957f-28f8e6275a13 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.301240] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98aa657-6a2c-4d53-9bb0-a4b37ec61046 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.333464] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87beed08-2685-4451-b2b8-59061a8b6666 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.342058] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a41547d-e8c8-43f3-ad59-ce37efaae6b2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.356984] env[65503]: DEBUG nova.compute.provider_tree [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.572653] env[65503]: INFO nova.compute.manager [-] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Took 1.52 seconds to deallocate network for instance. [ 1268.860222] env[65503]: DEBUG nova.scheduler.client.report [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1269.079417] env[65503]: DEBUG oslo_concurrency.lockutils [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1269.365532] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.176s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.366127] env[65503]: DEBUG nova.compute.manager [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 
5babb2a4-c9a1-412c-8fd2-91880037d119] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1269.369113] env[65503]: DEBUG oslo_concurrency.lockutils [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.290s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.369382] env[65503]: DEBUG nova.objects.instance [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lazy-loading 'resources' on Instance uuid 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1269.872688] env[65503]: DEBUG nova.compute.utils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1269.877805] env[65503]: DEBUG nova.compute.manager [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1269.879062] env[65503]: DEBUG nova.network.neutron [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1269.879062] env[65503]: WARNING neutronclient.v2_0.client [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1269.879062] env[65503]: WARNING neutronclient.v2_0.client [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
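The claim above succeeds against the provider inventory reported earlier in this log. Placement treats the usable capacity of each resource class roughly as (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size bounding each individual allocation. A small arithmetic sketch using the numbers from this provider:

    # Inventory values copied from provider 988ff85a-1d12-41bb-a369-e298e8491ca1.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 96},
    }

    def capacity(inv):
        """Effective schedulable capacity: (total - reserved) * allocation_ratio."""
        return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

    for rc, inv in inventory.items():
        print(rc, capacity(inv), "max per allocation:", inv["max_unit"])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0 -- which is why a
    # 1 VCPU / 192 MB / 1 GB claim fits easily alongside the instances
    # already tracked on this node.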
[ 1269.880129] env[65503]: WARNING openstack [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1269.880494] env[65503]: WARNING openstack [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1269.944734] env[65503]: DEBUG nova.policy [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af1890ab617d443e985db57a798cac5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93906c603f7a4b18a34fc4b42fb6d6c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1269.981671] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218ee56f-e482-45f8-afa9-285c8d63e5a1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.989750] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6281d200-412f-43e8-8e87-824ba7a78fe4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.024178] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33e3c42-81b5-4552-a92f-3153a2e8f581 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.037956] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b1884e-c19d-499a-bbca-660478dce9cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.054567] env[65503]: DEBUG nova.compute.provider_tree [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1270.272876] env[65503]: DEBUG nova.network.neutron [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Successfully created port: b331ee4d-cae5-46cf-b647-515091ba1275 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1270.379165] 
env[65503]: DEBUG nova.compute.manager [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1270.557764] env[65503]: DEBUG nova.scheduler.client.report [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1271.062436] env[65503]: DEBUG oslo_concurrency.lockutils [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.693s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.090759] env[65503]: INFO nova.scheduler.client.report [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Deleted allocations for instance 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17 [ 1271.389436] env[65503]: DEBUG nova.compute.manager [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1271.416110] env[65503]: DEBUG nova.virt.hardware [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1271.417280] env[65503]: DEBUG nova.virt.hardware [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1271.417280] env[65503]: DEBUG nova.virt.hardware [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1271.417280] env[65503]: DEBUG nova.virt.hardware [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1271.417280] env[65503]: DEBUG nova.virt.hardware [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1271.417280] env[65503]: DEBUG nova.virt.hardware [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1271.417280] env[65503]: DEBUG nova.virt.hardware [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1271.417514] env[65503]: DEBUG nova.virt.hardware [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1271.417610] env[65503]: DEBUG nova.virt.hardware [None 
req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1271.417771] env[65503]: DEBUG nova.virt.hardware [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1271.417944] env[65503]: DEBUG nova.virt.hardware [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1271.418867] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b89fd2-018a-4ca1-9dc2-2b33e151b7aa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.428305] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb60966-c1ca-4fa4-a618-972cd8d1d8cb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.599281] env[65503]: DEBUG oslo_concurrency.lockutils [None req-06f1fead-445c-4ef1-88ca-3008b6a74594 tempest-ServerActionsTestOtherB-577581098 tempest-ServerActionsTestOtherB-577581098-project-member] Lock "6d11d79b-b11f-4a31-a4e3-aa5b3346ae17" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.164s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.709734] env[65503]: DEBUG nova.compute.manager [req-29d1efe3-01bc-4553-88d4-55c4201c8407 req-7dee4c2a-94e5-43e0-8652-e8c7828439ff service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Received event network-vif-plugged-b331ee4d-cae5-46cf-b647-515091ba1275 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1271.709960] env[65503]: DEBUG oslo_concurrency.lockutils [req-29d1efe3-01bc-4553-88d4-55c4201c8407 req-7dee4c2a-94e5-43e0-8652-e8c7828439ff service nova] Acquiring lock "5babb2a4-c9a1-412c-8fd2-91880037d119-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1271.710176] env[65503]: DEBUG oslo_concurrency.lockutils [req-29d1efe3-01bc-4553-88d4-55c4201c8407 req-7dee4c2a-94e5-43e0-8652-e8c7828439ff service nova] Lock "5babb2a4-c9a1-412c-8fd2-91880037d119-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1271.710342] env[65503]: DEBUG oslo_concurrency.lockutils [req-29d1efe3-01bc-4553-88d4-55c4201c8407 req-7dee4c2a-94e5-43e0-8652-e8c7828439ff service nova] Lock "5babb2a4-c9a1-412c-8fd2-91880037d119-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1271.710507] env[65503]: DEBUG nova.compute.manager [req-29d1efe3-01bc-4553-88d4-55c4201c8407 req-7dee4c2a-94e5-43e0-8652-e8c7828439ff service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] No waiting events found dispatching network-vif-plugged-b331ee4d-cae5-46cf-b647-515091ba1275 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1271.710671] env[65503]: WARNING nova.compute.manager [req-29d1efe3-01bc-4553-88d4-55c4201c8407 req-7dee4c2a-94e5-43e0-8652-e8c7828439ff service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Received unexpected event network-vif-plugged-b331ee4d-cae5-46cf-b647-515091ba1275 for instance with vm_state building and task_state spawning. [ 1272.195848] env[65503]: DEBUG nova.network.neutron [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Successfully updated port: b331ee4d-cae5-46cf-b647-515091ba1275 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1272.698493] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-5babb2a4-c9a1-412c-8fd2-91880037d119" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.698493] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-5babb2a4-c9a1-412c-8fd2-91880037d119" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1272.698710] env[65503]: DEBUG nova.network.neutron [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1273.202082] env[65503]: WARNING openstack [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1273.202372] env[65503]: WARNING openstack [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1273.239732] env[65503]: DEBUG nova.network.neutron [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1273.261993] env[65503]: WARNING openstack [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1273.262482] env[65503]: WARNING openstack [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1273.349723] env[65503]: WARNING neutronclient.v2_0.client [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1273.350411] env[65503]: WARNING openstack [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1273.350759] env[65503]: WARNING openstack [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1273.439692] env[65503]: DEBUG nova.network.neutron [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Updating instance_info_cache with network_info: [{"id": "b331ee4d-cae5-46cf-b647-515091ba1275", "address": "fa:16:3e:fc:e7:3f", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb331ee4d-ca", 
"ovs_interfaceid": "b331ee4d-cae5-46cf-b647-515091ba1275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1273.747724] env[65503]: DEBUG nova.compute.manager [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Received event network-changed-b331ee4d-cae5-46cf-b647-515091ba1275 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1273.748017] env[65503]: DEBUG nova.compute.manager [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Refreshing instance network info cache due to event network-changed-b331ee4d-cae5-46cf-b647-515091ba1275. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1273.748242] env[65503]: DEBUG oslo_concurrency.lockutils [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] Acquiring lock "refresh_cache-5babb2a4-c9a1-412c-8fd2-91880037d119" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.943266] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-5babb2a4-c9a1-412c-8fd2-91880037d119" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.943816] env[65503]: DEBUG nova.compute.manager [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Instance network_info: |[{"id": "b331ee4d-cae5-46cf-b647-515091ba1275", "address": "fa:16:3e:fc:e7:3f", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb331ee4d-ca", "ovs_interfaceid": "b331ee4d-cae5-46cf-b647-515091ba1275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1273.944319] env[65503]: DEBUG oslo_concurrency.lockutils [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] Acquired lock "refresh_cache-5babb2a4-c9a1-412c-8fd2-91880037d119" {{(pid=65503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.944607] env[65503]: DEBUG nova.network.neutron [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Refreshing network info cache for port b331ee4d-cae5-46cf-b647-515091ba1275 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1273.946585] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:e7:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b331ee4d-cae5-46cf-b647-515091ba1275', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1273.954875] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1273.956095] env[65503]: WARNING neutronclient.v2_0.client [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1273.956721] env[65503]: WARNING openstack [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1273.957097] env[65503]: WARNING openstack [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1273.965307] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1273.966057] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d6329ce-9ea6-4d49-b892-070f1b2a12a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.988214] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1273.988214] env[65503]: value = "task-4450987" [ 1273.988214] env[65503]: _type = "Task" [ 1273.988214] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.000606] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450987, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.071174] env[65503]: WARNING openstack [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1274.071174] env[65503]: WARNING openstack [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1274.141259] env[65503]: WARNING neutronclient.v2_0.client [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1274.141952] env[65503]: WARNING openstack [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1274.142317] env[65503]: WARNING openstack [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1274.226504] env[65503]: DEBUG nova.network.neutron [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Updated VIF entry in instance network info cache for port b331ee4d-cae5-46cf-b647-515091ba1275. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1274.226869] env[65503]: DEBUG nova.network.neutron [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Updating instance_info_cache with network_info: [{"id": "b331ee4d-cae5-46cf-b647-515091ba1275", "address": "fa:16:3e:fc:e7:3f", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb331ee4d-ca", "ovs_interfaceid": "b331ee4d-cae5-46cf-b647-515091ba1275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1274.499852] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450987, 'name': CreateVM_Task, 'duration_secs': 0.331761} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.500101] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1274.500505] env[65503]: WARNING neutronclient.v2_0.client [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
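The CreateVM_Task records above follow the usual oslo.vmware pattern: an asynchronous vSphere call returns a task reference, which is then polled until it finishes (the "progress is N%." lines). A minimal sketch of that pattern, assuming an oslo_vmware.api.VMwareAPISession and placeholder managed-object references — not code taken from this run:

from oslo_vmware import api


def create_vm(session, vm_folder, res_pool, config_spec):
    """Invoke CreateVM_Task and block until vCenter reports the result."""
    # invoke_api() returns a task reference immediately; the work runs in vCenter.
    task = session.invoke_api(session.vim, 'CreateVM_Task',
                              vm_folder, config=config_spec, pool=res_pool)
    # wait_for_task() polls the task and raises on failure; on success it
    # returns the task info, whose result is the new VM managed object.
    return session.wait_for_task(task)


# Hypothetical session; host and credentials are placeholders, and the call
# only succeeds against a reachable vCenter.
session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)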
[ 1274.500876] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.501030] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1274.501457] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1274.501731] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c73fb3b-ef14-4f98-acda-b3351bf80abe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.508571] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1274.508571] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526c0c9b-67d7-a744-f69a-b33aeaeec46a" [ 1274.508571] env[65503]: _type = "Task" [ 1274.508571] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.518265] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526c0c9b-67d7-a744-f69a-b33aeaeec46a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.730277] env[65503]: DEBUG oslo_concurrency.lockutils [req-1783e88f-b31c-4f15-b337-1cda1223008e req-e7b8f6c3-e42d-4b42-91ba-8b699469c429 service nova] Releasing lock "refresh_cache-5babb2a4-c9a1-412c-8fd2-91880037d119" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.024124] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526c0c9b-67d7-a744-f69a-b33aeaeec46a, 'name': SearchDatastore_Task, 'duration_secs': 0.01307} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.024612] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.024969] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1275.025348] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.025585] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1275.025867] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1275.026419] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2465178-77f0-4fbe-b2df-f2a04d7b420a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.038742] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1275.038742] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1275.039567] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28ded6e9-be74-49f3-9fd4-9ede14ba1a4f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.045942] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1275.045942] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fda80d-81b2-9e73-f10f-3c7796849a40" [ 1275.045942] env[65503]: _type = "Task" [ 1275.045942] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.056024] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fda80d-81b2-9e73-f10f-3c7796849a40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.558534] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fda80d-81b2-9e73-f10f-3c7796849a40, 'name': SearchDatastore_Task, 'duration_secs': 0.010122} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.559330] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-826787e8-56f2-441c-ab21-93ad438fb062 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.565765] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1275.565765] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e68b08-72fa-71d7-c741-74a307924065" [ 1275.565765] env[65503]: _type = "Task" [ 1275.565765] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.574875] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e68b08-72fa-71d7-c741-74a307924065, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.079036] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e68b08-72fa-71d7-c741-74a307924065, 'name': SearchDatastore_Task, 'duration_secs': 0.010525} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.079411] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1276.079690] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119/5babb2a4-c9a1-412c-8fd2-91880037d119.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1276.080016] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-670b0c5e-1e75-41eb-8b0b-5b311a0ff7ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.088130] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1276.088130] env[65503]: value = "task-4450989" [ 1276.088130] env[65503]: _type = "Task" [ 1276.088130] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.097894] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450989, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.600450] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450989, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.102696] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450989, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520911} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.103454] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119/5babb2a4-c9a1-412c-8fd2-91880037d119.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1277.103454] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1277.103454] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93d347f0-c003-4fad-903f-b9be4ff61f07 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.112098] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1277.112098] env[65503]: value = "task-4450990" [ 1277.112098] env[65503]: _type = "Task" [ 1277.112098] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.124501] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450990, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.623507] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450990, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067872} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.623924] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1277.624603] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd26422-a73a-489d-b253-0d55c786f576 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.649699] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119/5babb2a4-c9a1-412c-8fd2-91880037d119.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1277.650051] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3978e1a9-d524-4af4-b1e8-d99ee354cb9d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.671091] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1277.671091] env[65503]: value = "task-4450991" [ 1277.671091] env[65503]: _type = "Task" [ 1277.671091] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.680531] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450991, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.182214] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450991, 'name': ReconfigVM_Task, 'duration_secs': 0.319129} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.182564] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119/5babb2a4-c9a1-412c-8fd2-91880037d119.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1278.183259] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d50c243-9fef-4f34-b9aa-5943351f571a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.190645] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1278.190645] env[65503]: value = "task-4450992" [ 1278.190645] env[65503]: _type = "Task" [ 1278.190645] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.204352] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450992, 'name': Rename_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.702777] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450992, 'name': Rename_Task, 'duration_secs': 0.167933} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.703147] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1278.703299] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f4f5a5d-0673-440d-9088-5c848b09f977 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.711179] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1278.711179] env[65503]: value = "task-4450993" [ 1278.711179] env[65503]: _type = "Task" [ 1278.711179] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.721464] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450993, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.224325] env[65503]: DEBUG oslo_vmware.api [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4450993, 'name': PowerOnVM_Task, 'duration_secs': 0.487589} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.224669] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1279.224882] env[65503]: INFO nova.compute.manager [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Took 7.84 seconds to spawn the instance on the hypervisor. [ 1279.225174] env[65503]: DEBUG nova.compute.manager [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1279.226030] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b8e338-5359-47f8-bbea-e524753416fa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.338976] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "18508c5b-2830-41fd-b92c-675a6d04e6be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1279.339278] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1279.747155] env[65503]: INFO nova.compute.manager [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Took 12.58 seconds to build instance. [ 1279.842106] env[65503]: DEBUG nova.compute.manager [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1280.250051] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0d1265bd-b38b-49c8-bdf2-301cc9aeb3b4 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "5babb2a4-c9a1-412c-8fd2-91880037d119" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.089s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.372247] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.372526] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.374300] env[65503]: INFO nova.compute.claims [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1280.636840] env[65503]: DEBUG nova.compute.manager [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Received event network-changed-b331ee4d-cae5-46cf-b647-515091ba1275 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1280.636840] env[65503]: DEBUG nova.compute.manager [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Refreshing instance network info cache due to event network-changed-b331ee4d-cae5-46cf-b647-515091ba1275. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1280.636840] env[65503]: DEBUG oslo_concurrency.lockutils [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] Acquiring lock "refresh_cache-5babb2a4-c9a1-412c-8fd2-91880037d119" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.636840] env[65503]: DEBUG oslo_concurrency.lockutils [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] Acquired lock "refresh_cache-5babb2a4-c9a1-412c-8fd2-91880037d119" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.636840] env[65503]: DEBUG nova.network.neutron [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Refreshing network info cache for port b331ee4d-cae5-46cf-b647-515091ba1275 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1280.974580] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquiring lock "c621ac90-4619-4e67-9494-a8817744a4be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.974835] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Lock "c621ac90-4619-4e67-9494-a8817744a4be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.140714] env[65503]: WARNING neutronclient.v2_0.client [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
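The Acquiring/Acquired/Releasing lines around the refresh_cache-<instance uuid> and compute_resources locks come from oslo.concurrency's lockutils. A small sketch of that locking pattern, with placeholder function bodies and lock names that mirror the conventions seen in the log:

from oslo_concurrency import lockutils


def refresh_instance_cache(instance_uuid):
    # lockutils.lock() is a context manager: entering it produces the
    # "Acquiring"/"Acquired" records, leaving it the "Releasing" record.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # placeholder: rebuild the instance network info cache here


# The same serialization can also be expressed declaratively with a decorator.
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # placeholder: work guarded by the compute_resources lock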
[ 1281.141454] env[65503]: WARNING openstack [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1281.141781] env[65503]: WARNING openstack [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1281.253823] env[65503]: WARNING openstack [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1281.254355] env[65503]: WARNING openstack [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1281.319916] env[65503]: WARNING neutronclient.v2_0.client [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1281.320849] env[65503]: WARNING openstack [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1281.321324] env[65503]: WARNING openstack [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1281.418291] env[65503]: DEBUG nova.network.neutron [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Updated VIF entry in instance network info cache for port b331ee4d-cae5-46cf-b647-515091ba1275. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1281.418670] env[65503]: DEBUG nova.network.neutron [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Updating instance_info_cache with network_info: [{"id": "b331ee4d-cae5-46cf-b647-515091ba1275", "address": "fa:16:3e:fc:e7:3f", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb331ee4d-ca", "ovs_interfaceid": "b331ee4d-cae5-46cf-b647-515091ba1275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1281.476721] env[65503]: DEBUG nova.compute.manager [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1281.515712] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacfb416-32a2-4911-98cb-abd9add875f1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.524224] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66a656c-74e2-4607-aa59-b80e77ab299d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.555871] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed461125-5ea1-495c-87d7-0d968e612e92 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.564627] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263bd187-497a-451a-b417-7a71f7646e9a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.582957] env[65503]: DEBUG nova.compute.provider_tree [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.922030] env[65503]: DEBUG oslo_concurrency.lockutils [req-162c7aae-d2a8-48fc-896a-a9ea533d7064 req-755ec5b9-a9ce-4178-a688-2cf786456f3c service nova] Releasing lock "refresh_cache-5babb2a4-c9a1-412c-8fd2-91880037d119" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.997432] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1282.086251] env[65503]: DEBUG nova.scheduler.client.report [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1282.592190] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.219s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.592631] env[65503]: DEBUG nova.compute.manager [None 
req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1282.595439] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.598s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1282.596847] env[65503]: INFO nova.compute.claims [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1283.101939] env[65503]: DEBUG nova.compute.utils [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1283.104827] env[65503]: DEBUG nova.compute.manager [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1283.105061] env[65503]: DEBUG nova.network.neutron [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1283.105359] env[65503]: WARNING neutronclient.v2_0.client [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1283.105661] env[65503]: WARNING neutronclient.v2_0.client [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
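The repeated neutronclient warnings in this section recommend openstacksdk as the replacement binding. A hypothetical sketch of creating an instance port with the SDK — the cloud name is a placeholder, and the helper name and arguments are illustrative, roughly what allocate_for_instance() asks Neutron for rather than Nova's actual code path:

import openstack

# 'devstack' names a clouds.yaml entry; it is a placeholder, not from this log.
conn = openstack.connect(cloud='devstack')


def create_instance_port(conn, network_id, instance_uuid):
    """Create a Neutron port pre-bound to an instance."""
    return conn.network.create_port(
        network_id=network_id,
        device_id=instance_uuid,
        device_owner='compute:nova',
    )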
[ 1283.106250] env[65503]: WARNING openstack [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1283.106669] env[65503]: WARNING openstack [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1283.150099] env[65503]: DEBUG nova.policy [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9955668c2464ddfb0eae34aa700ddd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '521d40776571452e85178972f97c8622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1283.464079] env[65503]: DEBUG nova.network.neutron [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Successfully created port: ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1283.605912] env[65503]: DEBUG nova.compute.manager [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1283.731397] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4579cb-d017-4cd0-83eb-2d07a7fb434b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.741022] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74cbbca-f0b8-4eed-8628-58fa7f7c1f25 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.770935] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4031c2d4-3a6b-4d43-a8d2-07e2da3eb0b5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.779478] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9386761-bfc7-419c-9c3f-8900c3159c8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.795077] env[65503]: DEBUG nova.compute.provider_tree [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1284.113023] env[65503]: INFO nova.virt.block_device [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Booting with volume 30d1da98-0d01-4336-8a09-38e1c3611707 at /dev/sda [ 1284.148764] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb61b12f-e85c-4c44-a21b-f19ddca2a5f8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.159726] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2af541e-d082-45ff-85fa-af1e8351686d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.190257] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5f49b30f-efce-42a8-b4e4-b1ca28aad158 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.198935] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6f13a2-f48e-44bb-8ee0-01e04eb51109 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.226980] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b09e38-acf7-44c9-a936-8bfcd7ab9140 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.234391] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a09aabd-52f5-4d9c-a493-0d42b6d3f230 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.248544] env[65503]: DEBUG nova.virt.block_device [None 
req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating existing volume attachment record: 62a5539e-3220-46f0-a53a-5a8c50b7a2fd {{(pid=65503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1284.299038] env[65503]: DEBUG nova.scheduler.client.report [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1284.804202] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.208s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.804325] env[65503]: DEBUG nova.compute.manager [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1284.948283] env[65503]: DEBUG nova.compute.manager [req-737049b5-0b7a-43af-878d-7775a8959d02 req-3ef7127c-8d2e-49c3-b705-da19a8356ce7 service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Received event network-vif-plugged-ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1284.948509] env[65503]: DEBUG oslo_concurrency.lockutils [req-737049b5-0b7a-43af-878d-7775a8959d02 req-3ef7127c-8d2e-49c3-b705-da19a8356ce7 service nova] Acquiring lock "18508c5b-2830-41fd-b92c-675a6d04e6be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.948767] env[65503]: DEBUG oslo_concurrency.lockutils [req-737049b5-0b7a-43af-878d-7775a8959d02 req-3ef7127c-8d2e-49c3-b705-da19a8356ce7 service nova] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.948878] env[65503]: DEBUG oslo_concurrency.lockutils [req-737049b5-0b7a-43af-878d-7775a8959d02 req-3ef7127c-8d2e-49c3-b705-da19a8356ce7 service nova] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.949046] env[65503]: DEBUG nova.compute.manager [req-737049b5-0b7a-43af-878d-7775a8959d02 req-3ef7127c-8d2e-49c3-b705-da19a8356ce7 service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] No waiting events found dispatching network-vif-plugged-ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1284.949211] env[65503]: WARNING nova.compute.manager [req-737049b5-0b7a-43af-878d-7775a8959d02 req-3ef7127c-8d2e-49c3-b705-da19a8356ce7 service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Received unexpected event network-vif-plugged-ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2 for instance with vm_state building and task_state block_device_mapping. 
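The provider inventory reported a few records above describes each resource class with total, reserved, allocation_ratio and max_unit; the capacity Placement schedules against works out to (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check using the figures from the log:

# Figures copied from the provider inventory records above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 96},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity {capacity:g}, max per allocation {inv['max_unit']}")
# Prints: VCPU 192, MEMORY_MB 196078, DISK_GB 200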
[ 1285.036471] env[65503]: DEBUG nova.network.neutron [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Successfully updated port: ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1285.264961] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "fb2dddac-4ac0-498a-b972-e61255833ad0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.265326] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.265546] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.265730] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.265891] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1285.268252] env[65503]: INFO nova.compute.manager [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Terminating instance [ 1285.309775] env[65503]: DEBUG nova.compute.utils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1285.311416] env[65503]: DEBUG nova.compute.manager [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] 
Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1285.311522] env[65503]: DEBUG nova.network.neutron [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1285.311829] env[65503]: WARNING neutronclient.v2_0.client [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1285.312158] env[65503]: WARNING neutronclient.v2_0.client [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1285.312768] env[65503]: WARNING openstack [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1285.313147] env[65503]: WARNING openstack [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1285.362432] env[65503]: DEBUG nova.policy [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65b9188b53704418b71494774a2e1d66', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bfe83a39b0ba45ca91078afb31ccb0d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1285.539606] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.540279] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.540279] env[65503]: DEBUG nova.network.neutron [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1285.662588] env[65503]: DEBUG nova.network.neutron [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Successfully created port: ec974d8b-e15e-486d-acbb-c01dfceb2bba {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1285.772384] env[65503]: DEBUG nova.compute.manager [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1285.772593] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1285.773611] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34d0cfe-2a4b-45ba-95e4-df0c587b4c5d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.782701] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1285.782999] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86141b9c-7274-4601-9dcf-c84a995360a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.791512] env[65503]: DEBUG oslo_vmware.api [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1285.791512] env[65503]: value = "task-4450994" [ 1285.791512] env[65503]: _type = "Task" [ 1285.791512] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.800565] env[65503]: DEBUG oslo_vmware.api [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450994, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.821901] env[65503]: DEBUG nova.compute.manager [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1286.043431] env[65503]: WARNING openstack [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1286.043952] env[65503]: WARNING openstack [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1286.076769] env[65503]: DEBUG nova.network.neutron [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1286.093576] env[65503]: WARNING openstack [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1286.093951] env[65503]: WARNING openstack [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1286.149628] env[65503]: WARNING neutronclient.v2_0.client [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1286.150304] env[65503]: WARNING openstack [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1286.150650] env[65503]: WARNING openstack [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1286.230442] env[65503]: DEBUG nova.network.neutron [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance_info_cache with network_info: [{"id": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "address": "fa:16:3e:c7:ab:46", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7c7a8f-f0", "ovs_interfaceid": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1286.302733] env[65503]: DEBUG oslo_vmware.api [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450994, 'name': PowerOffVM_Task, 'duration_secs': 0.204007} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.303043] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1286.303281] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1286.303593] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90d7dc39-c703-4c4f-9a28-7f93543bdad4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.342264] env[65503]: DEBUG nova.compute.manager [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1286.342888] env[65503]: DEBUG nova.virt.hardware [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1286.343129] env[65503]: DEBUG nova.virt.hardware [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1286.343283] env[65503]: DEBUG nova.virt.hardware [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1286.343463] env[65503]: DEBUG nova.virt.hardware [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1286.343618] env[65503]: DEBUG nova.virt.hardware [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:400}} [ 1286.343803] env[65503]: DEBUG nova.virt.hardware [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1286.344031] env[65503]: DEBUG nova.virt.hardware [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1286.344201] env[65503]: DEBUG nova.virt.hardware [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1286.344372] env[65503]: DEBUG nova.virt.hardware [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1286.344535] env[65503]: DEBUG nova.virt.hardware [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1286.344709] env[65503]: DEBUG nova.virt.hardware [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1286.345778] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d02d0b4-69a2-4fdc-9ef2-e5b4c14637ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.354688] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441e5f1f-6bb4-49dc-ba68-b203538aea40 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.381613] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1286.381913] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1286.382164] env[65503]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleting the datastore file [datastore2] fb2dddac-4ac0-498a-b972-e61255833ad0 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1286.382433] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad4f2a1b-b6bd-4935-8029-8dcb815d8885 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.390776] env[65503]: DEBUG oslo_vmware.api [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for the task: (returnval){ [ 1286.390776] env[65503]: value = "task-4450996" [ 1286.390776] env[65503]: _type = "Task" [ 1286.390776] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.399599] env[65503]: DEBUG oslo_vmware.api [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450996, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.733665] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1286.734152] env[65503]: DEBUG nova.compute.manager [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Instance network_info: |[{"id": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "address": "fa:16:3e:c7:ab:46", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7c7a8f-f0", "ovs_interfaceid": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1286.734657] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 
tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:ab:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a7ba8d0-0208-4af7-af44-2a5ad382f9be', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1286.742388] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1286.742626] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1286.742868] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-676b4e8e-1566-48c7-8596-189c060ed5e3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.764132] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1286.764132] env[65503]: value = "task-4450997" [ 1286.764132] env[65503]: _type = "Task" [ 1286.764132] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.772807] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450997, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.832466] env[65503]: DEBUG nova.compute.manager [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1286.863195] env[65503]: DEBUG nova.virt.hardware [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1286.863478] env[65503]: DEBUG nova.virt.hardware [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1286.863605] env[65503]: DEBUG nova.virt.hardware [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1286.863777] env[65503]: DEBUG nova.virt.hardware [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1286.863917] env[65503]: DEBUG nova.virt.hardware [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1286.864069] env[65503]: DEBUG nova.virt.hardware [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1286.864289] env[65503]: DEBUG nova.virt.hardware [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1286.864474] env[65503]: DEBUG nova.virt.hardware [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1286.864641] env[65503]: DEBUG nova.virt.hardware [None req-1dc01806-3856-4607-991f-0b733a6ba288 
tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1286.864799] env[65503]: DEBUG nova.virt.hardware [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1286.864968] env[65503]: DEBUG nova.virt.hardware [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1286.865966] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974482e2-c5d3-4f80-acf9-eef1e8d95f20 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.876561] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e405ea50-d772-4595-b801-113671fea0ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.902167] env[65503]: DEBUG oslo_vmware.api [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Task: {'id': task-4450996, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138382} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.902469] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1286.902668] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1286.902838] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1286.903022] env[65503]: INFO nova.compute.manager [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1286.903280] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1286.903492] env[65503]: DEBUG nova.compute.manager [-] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1286.903593] env[65503]: DEBUG nova.network.neutron [-] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1286.903847] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1286.904394] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1286.904658] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1286.951544] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1286.993077] env[65503]: DEBUG nova.compute.manager [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Received event network-changed-ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1286.993350] env[65503]: DEBUG nova.compute.manager [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Refreshing instance network info cache due to event network-changed-ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1286.994177] env[65503]: DEBUG oslo_concurrency.lockutils [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] Acquiring lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.994177] env[65503]: DEBUG oslo_concurrency.lockutils [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] Acquired lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.994177] env[65503]: DEBUG nova.network.neutron [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Refreshing network info cache for port ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1287.108509] env[65503]: DEBUG nova.compute.manager [req-a1d1e75c-f348-4862-8336-6655f4e612e9 req-ae1804e6-fdd0-4071-af15-470c6ede8b5e service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Received event network-vif-plugged-ec974d8b-e15e-486d-acbb-c01dfceb2bba {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1287.108735] env[65503]: DEBUG oslo_concurrency.lockutils [req-a1d1e75c-f348-4862-8336-6655f4e612e9 req-ae1804e6-fdd0-4071-af15-470c6ede8b5e service nova] Acquiring lock "c621ac90-4619-4e67-9494-a8817744a4be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.109538] env[65503]: DEBUG oslo_concurrency.lockutils [req-a1d1e75c-f348-4862-8336-6655f4e612e9 req-ae1804e6-fdd0-4071-af15-470c6ede8b5e service nova] Lock "c621ac90-4619-4e67-9494-a8817744a4be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.109538] env[65503]: DEBUG oslo_concurrency.lockutils [req-a1d1e75c-f348-4862-8336-6655f4e612e9 req-ae1804e6-fdd0-4071-af15-470c6ede8b5e service nova] Lock "c621ac90-4619-4e67-9494-a8817744a4be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.109538] env[65503]: DEBUG nova.compute.manager [req-a1d1e75c-f348-4862-8336-6655f4e612e9 req-ae1804e6-fdd0-4071-af15-470c6ede8b5e service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] No waiting events found dispatching network-vif-plugged-ec974d8b-e15e-486d-acbb-c01dfceb2bba {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1287.109775] env[65503]: WARNING nova.compute.manager [req-a1d1e75c-f348-4862-8336-6655f4e612e9 req-ae1804e6-fdd0-4071-af15-470c6ede8b5e service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Received unexpected event network-vif-plugged-ec974d8b-e15e-486d-acbb-c01dfceb2bba for instance with vm_state building and task_state spawning. 
[ 1287.199995] env[65503]: DEBUG nova.network.neutron [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Successfully updated port: ec974d8b-e15e-486d-acbb-c01dfceb2bba {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1287.275421] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4450997, 'name': CreateVM_Task, 'duration_secs': 0.436043} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.275610] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1287.276232] env[65503]: WARNING neutronclient.v2_0.client [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1287.276507] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'boot_index': 0, 'mount_device': '/dev/sda', 'device_type': None, 'attachment_id': '62a5539e-3220-46f0-a53a-5a8c50b7a2fd', 'delete_on_termination': True, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870516', 'volume_id': '30d1da98-0d01-4336-8a09-38e1c3611707', 'name': 'volume-30d1da98-0d01-4336-8a09-38e1c3611707', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '18508c5b-2830-41fd-b92c-675a6d04e6be', 'attached_at': '', 'detached_at': '', 'volume_id': '30d1da98-0d01-4336-8a09-38e1c3611707', 'serial': '30d1da98-0d01-4336-8a09-38e1c3611707'}, 'volume_type': None}], 'swap': None} {{(pid=65503) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1287.276712] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Root volume attach. 
Driver type: vmdk {{(pid=65503) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1287.277635] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b031c4eb-dd79-403e-9252-d8e608712004 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.288042] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3851a8-00a7-46f6-bbcb-f8b8b389d331 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.295992] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26284630-3757-4cf8-b008-a46ecd4118f3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.303139] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-41256ffd-1bfe-4796-877e-5048df74daec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.310988] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1287.310988] env[65503]: value = "task-4450998" [ 1287.310988] env[65503]: _type = "Task" [ 1287.310988] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.322646] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450998, 'name': RelocateVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.496236] env[65503]: WARNING neutronclient.v2_0.client [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1287.496901] env[65503]: WARNING openstack [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1287.497442] env[65503]: WARNING openstack [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1287.604398] env[65503]: WARNING openstack [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1287.604828] env[65503]: WARNING openstack [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1287.671298] env[65503]: WARNING neutronclient.v2_0.client [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1287.671988] env[65503]: WARNING openstack [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1287.672389] env[65503]: WARNING openstack [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1287.701735] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquiring lock "refresh_cache-c621ac90-4619-4e67-9494-a8817744a4be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.701974] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquired lock "refresh_cache-c621ac90-4619-4e67-9494-a8817744a4be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1287.702209] env[65503]: DEBUG nova.network.neutron [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1287.759108] env[65503]: DEBUG nova.network.neutron [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updated VIF entry in instance network info cache for port ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1287.759469] env[65503]: DEBUG nova.network.neutron [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance_info_cache with network_info: [{"id": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "address": "fa:16:3e:c7:ab:46", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7c7a8f-f0", "ovs_interfaceid": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1287.821880] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450998, 'name': RelocateVM_Task, 'duration_secs': 0.456927} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.822206] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Volume attach. 
Driver type: vmdk {{(pid=65503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1287.822406] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870516', 'volume_id': '30d1da98-0d01-4336-8a09-38e1c3611707', 'name': 'volume-30d1da98-0d01-4336-8a09-38e1c3611707', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '18508c5b-2830-41fd-b92c-675a6d04e6be', 'attached_at': '', 'detached_at': '', 'volume_id': '30d1da98-0d01-4336-8a09-38e1c3611707', 'serial': '30d1da98-0d01-4336-8a09-38e1c3611707'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1287.823187] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c1a424-d1e4-4659-bcc5-c95300750f68 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.840603] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b285bc2f-5b1a-4d62-ba1a-e724f02bb96a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.863776] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] volume-30d1da98-0d01-4336-8a09-38e1c3611707/volume-30d1da98-0d01-4336-8a09-38e1c3611707.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1287.864108] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a47390e3-3f7a-4188-9229-a9ba15286ef4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.886281] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1287.886281] env[65503]: value = "task-4450999" [ 1287.886281] env[65503]: _type = "Task" [ 1287.886281] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.899054] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450999, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.922391] env[65503]: DEBUG nova.network.neutron [-] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1288.205949] env[65503]: WARNING openstack [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1288.206128] env[65503]: WARNING openstack [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1288.244536] env[65503]: DEBUG nova.network.neutron [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1288.262683] env[65503]: DEBUG oslo_concurrency.lockutils [req-acc19861-147c-42f6-a558-001099064fd1 req-b8db7e12-bfe3-42be-bcbd-4d5177ed2254 service nova] Releasing lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1288.265310] env[65503]: WARNING openstack [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1288.265684] env[65503]: WARNING openstack [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1288.328294] env[65503]: WARNING neutronclient.v2_0.client [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1288.329033] env[65503]: WARNING openstack [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1288.329548] env[65503]: WARNING openstack [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1288.397615] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4450999, 'name': ReconfigVM_Task, 'duration_secs': 0.320461} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.397919] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Reconfigured VM instance instance-00000073 to attach disk [datastore1] volume-30d1da98-0d01-4336-8a09-38e1c3611707/volume-30d1da98-0d01-4336-8a09-38e1c3611707.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1288.402878] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac23364f-bc52-4f50-882c-b792368a4793 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.415781] env[65503]: DEBUG nova.network.neutron [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Updating instance_info_cache with network_info: [{"id": "ec974d8b-e15e-486d-acbb-c01dfceb2bba", "address": "fa:16:3e:4a:42:96", "network": {"id": "9c0c7f3e-3a07-4e85-b53e-57f4eb24274d", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1337783297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe83a39b0ba45ca91078afb31ccb0d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec974d8b-e1", "ovs_interfaceid": "ec974d8b-e15e-486d-acbb-c01dfceb2bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1288.423121] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1288.423121] env[65503]: value = "task-4451000" [ 1288.423121] env[65503]: _type = "Task" [ 1288.423121] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.426785] env[65503]: INFO nova.compute.manager [-] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Took 1.52 seconds to deallocate network for instance. [ 1288.436752] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451000, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.919430] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Releasing lock "refresh_cache-c621ac90-4619-4e67-9494-a8817744a4be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1288.919844] env[65503]: DEBUG nova.compute.manager [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Instance network_info: |[{"id": "ec974d8b-e15e-486d-acbb-c01dfceb2bba", "address": "fa:16:3e:4a:42:96", "network": {"id": "9c0c7f3e-3a07-4e85-b53e-57f4eb24274d", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1337783297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe83a39b0ba45ca91078afb31ccb0d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec974d8b-e1", "ovs_interfaceid": "ec974d8b-e15e-486d-acbb-c01dfceb2bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1288.920331] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:42:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d377d75-3add-4a15-8691-74b2eb010924', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'ec974d8b-e15e-486d-acbb-c01dfceb2bba', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1288.928009] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Creating folder: Project (bfe83a39b0ba45ca91078afb31ccb0d2). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1288.928358] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42e53157-8322-4a2f-9437-3fae52415361 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.942344] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.942632] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.942853] env[65503]: DEBUG nova.objects.instance [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lazy-loading 'resources' on Instance uuid fb2dddac-4ac0-498a-b972-e61255833ad0 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1288.944329] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451000, 'name': ReconfigVM_Task, 'duration_secs': 0.13822} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.945883] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870516', 'volume_id': '30d1da98-0d01-4336-8a09-38e1c3611707', 'name': 'volume-30d1da98-0d01-4336-8a09-38e1c3611707', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '18508c5b-2830-41fd-b92c-675a6d04e6be', 'attached_at': '', 'detached_at': '', 'volume_id': '30d1da98-0d01-4336-8a09-38e1c3611707', 'serial': '30d1da98-0d01-4336-8a09-38e1c3611707'} {{(pid=65503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1288.946444] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Created folder: Project (bfe83a39b0ba45ca91078afb31ccb0d2) in parent group-v870190. 
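The 'Acquiring lock "compute_resources" ... / Lock "compute_resources" acquired ... waited 0.000s' pairs above are emitted by oslo.concurrency's lockutils. A minimal sketch of the same serialization pattern, assuming an in-process (non-external) lock; the guarded function and its body are placeholders, not nova's ResourceTracker:

    # Sketch of the pattern behind the "Acquiring lock ... / acquired ...
    # waited N s / released ... held N s" debug lines. Lock name mirrors the
    # log; the function is illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid, delta):
        # Runs with the 'compute_resources' semaphore held, so concurrent
        # claims/updates in the same process are serialized.
        print('updating usage for %s by %s' % (instance_uuid, delta))

    update_usage('fb2dddac-4ac0-498a-b972-e61255833ad0', 1)

    # The same thing as a context manager instead of a decorator:
    with lockutils.lock('compute_resources'):
        pass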
[ 1288.946605] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Creating folder: Instances. Parent ref: group-v870519. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1288.946830] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-570c5c00-4d37-41ba-890e-78c206b59400 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.948602] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c2cd90f-e5ea-4832-b658-3507bef499d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.957164] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1288.957164] env[65503]: value = "task-4451002" [ 1288.957164] env[65503]: _type = "Task" [ 1288.957164] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.961915] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Created folder: Instances in parent group-v870519. [ 1288.962159] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1288.962720] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1288.962932] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a34cbbc-3503-4bd6-a66b-9343b8e99d8b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.981019] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451002, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.987325] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1288.987325] env[65503]: value = "task-4451004" [ 1288.987325] env[65503]: _type = "Task" [ 1288.987325] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.996180] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451004, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.023558] env[65503]: DEBUG nova.compute.manager [req-117de5ae-d755-4405-a022-4308e5b40942 req-a03d1931-d129-4cf2-abfa-e15bddf91cb4 service nova] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Received event network-vif-deleted-c7da6c50-239a-487b-91ca-3e82cb4d3794 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1289.136915] env[65503]: DEBUG nova.compute.manager [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Received event network-changed-ec974d8b-e15e-486d-acbb-c01dfceb2bba {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1289.137144] env[65503]: DEBUG nova.compute.manager [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Refreshing instance network info cache due to event network-changed-ec974d8b-e15e-486d-acbb-c01dfceb2bba. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1289.137447] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] Acquiring lock "refresh_cache-c621ac90-4619-4e67-9494-a8817744a4be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.137626] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] Acquired lock "refresh_cache-c621ac90-4619-4e67-9494-a8817744a4be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1289.137757] env[65503]: DEBUG nova.network.neutron [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Refreshing network info cache for port ec974d8b-e15e-486d-acbb-c01dfceb2bba {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1289.467656] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451002, 'name': Rename_Task, 'duration_secs': 0.136119} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.470608] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1289.471087] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a06f45c-ccf6-44b9-ba62-e9e8c7624e2c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.480144] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1289.480144] env[65503]: value = "task-4451005" [ 1289.480144] env[65503]: _type = "Task" [ 1289.480144] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.495038] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451005, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.502344] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451004, 'name': CreateVM_Task, 'duration_secs': 0.353595} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.502589] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1289.503296] env[65503]: WARNING neutronclient.v2_0.client [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
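The 'Waiting for the task ... / progress is N% / completed successfully' triples running through this section are oslo.vmware polling a vCenter task until it reaches a terminal state. The general shape of such a poll loop is sketched below; this is a schematic with an invented fetch_task_state() helper, not oslo.vmware's implementation:

    # Schematic of a poll-until-terminal loop like the one behind the
    # "progress is N%" / "completed successfully" lines. fetch_task_state()
    # is a hypothetical stand-in for querying the task's 'info' property.
    import time

    def wait_for_task(task_id, fetch_task_state, poll_interval=0.5):
        while True:
            state, progress, error = fetch_task_state(task_id)
            if state == 'success':
                return                     # logged as "completed successfully"
            if state == 'error':
                raise RuntimeError(error)  # surfaced as a task failure
            # 'queued' or 'running': report progress and try again.
            print('Task %s progress is %s%%' % (task_id, progress))
            time.sleep(poll_interval)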
[ 1289.503800] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.504054] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1289.504536] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1289.504873] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89becffd-b21c-4d8d-961e-68f2b67de276 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.511549] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for the task: (returnval){ [ 1289.511549] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52261302-422c-c495-e880-5bc5a729678a" [ 1289.511549] env[65503]: _type = "Task" [ 1289.511549] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.520630] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52261302-422c-c495-e880-5bc5a729678a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.559233] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1cecb6-9bd9-425d-bc27-dcff1588c929 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.569155] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd159bc2-bca5-4edb-ad47-94e1d57e1af7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.609810] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515be23f-1908-4896-a310-6fe3263f7d41 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.619031] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c7542c-eff2-4b87-9c74-e87b1332f9f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.636080] env[65503]: DEBUG nova.compute.provider_tree [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1289.640391] env[65503]: WARNING neutronclient.v2_0.client [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1289.641094] env[65503]: WARNING openstack [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1289.641447] env[65503]: WARNING openstack [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1289.738541] env[65503]: WARNING openstack [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1289.738932] env[65503]: WARNING openstack [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1289.806521] env[65503]: WARNING neutronclient.v2_0.client [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1289.807237] env[65503]: WARNING openstack [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1289.807627] env[65503]: WARNING openstack [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1289.896699] env[65503]: DEBUG nova.network.neutron [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Updated VIF entry in instance network info cache for port ec974d8b-e15e-486d-acbb-c01dfceb2bba. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1289.897148] env[65503]: DEBUG nova.network.neutron [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Updating instance_info_cache with network_info: [{"id": "ec974d8b-e15e-486d-acbb-c01dfceb2bba", "address": "fa:16:3e:4a:42:96", "network": {"id": "9c0c7f3e-3a07-4e85-b53e-57f4eb24274d", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1337783297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe83a39b0ba45ca91078afb31ccb0d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec974d8b-e1", "ovs_interfaceid": "ec974d8b-e15e-486d-acbb-c01dfceb2bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1289.991514] env[65503]: DEBUG oslo_vmware.api [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451005, 'name': PowerOnVM_Task, 'duration_secs': 0.456472} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.991807] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1289.992015] env[65503]: INFO nova.compute.manager [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Took 3.65 seconds to spawn the instance on the hypervisor. 
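The instance_info_cache payloads logged above are JSON-style lists of VIF dicts. A small sketch of pulling the commonly used fields (port id, MAC, device name, MTU, fixed IPs) out of one such entry; the literal below is a trimmed copy of the ec974d8b-... entry and the helper is illustrative:

    # Extract a few fields from a cached VIF entry of the shape logged above.
    vif = {
        "id": "ec974d8b-e15e-486d-acbb-c01dfceb2bba",
        "address": "fa:16:3e:4a:42:96",
        "devname": "tapec974d8b-e1",
        "network": {
            "id": "9c0c7f3e-3a07-4e85-b53e-57f4eb24274d",
            "bridge": "br-int",
            "meta": {"mtu": 8950},
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.7", "type": "fixed"}],
            }],
        },
    }

    def summarize_vif(vif):
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "device": vif["devname"],
            "mtu": vif["network"]["meta"]["mtu"],
            "fixed_ips": ips,
        }

    print(summarize_vif(vif))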
[ 1289.992203] env[65503]: DEBUG nova.compute.manager [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1289.993035] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bc02e8-4fd1-4a74-8686-2038dbd2533e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.023406] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52261302-422c-c495-e880-5bc5a729678a, 'name': SearchDatastore_Task, 'duration_secs': 0.011721} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.023781] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1290.024041] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1290.024280] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.024425] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1290.024600] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1290.024975] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db6d9c16-5e7e-432f-ad97-371fa11f29ee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.035487] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Created directory 
with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1290.035739] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1290.037323] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfdc90b2-1273-4439-ae09-86ffa1e573b8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.044711] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for the task: (returnval){ [ 1290.044711] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]525949d6-469f-ed40-12a4-cb9331075d7c" [ 1290.044711] env[65503]: _type = "Task" [ 1290.044711] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.054283] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525949d6-469f-ed40-12a4-cb9331075d7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.140308] env[65503]: DEBUG nova.scheduler.client.report [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1290.400307] env[65503]: DEBUG oslo_concurrency.lockutils [req-cd56761f-1e1b-4be5-817e-7019ea334152 req-04b7f3ea-393e-40d7-a4e0-c28ac7fecda6 service nova] Releasing lock "refresh_cache-c621ac90-4619-4e67-9494-a8817744a4be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1290.511759] env[65503]: INFO nova.compute.manager [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Took 10.16 seconds to build instance. [ 1290.556114] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]525949d6-469f-ed40-12a4-cb9331075d7c, 'name': SearchDatastore_Task, 'duration_secs': 0.011357} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.556983] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-605de5fe-3354-4f8d-8cd0-fafd27092789 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.562948] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for the task: (returnval){ [ 1290.562948] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52856f1b-ddf3-e647-d5aa-53bc76ca5636" [ 1290.562948] env[65503]: _type = "Task" [ 1290.562948] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.571903] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52856f1b-ddf3-e647-d5aa-53bc76ca5636, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.645408] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.703s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1290.671292] env[65503]: INFO nova.scheduler.client.report [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Deleted allocations for instance fb2dddac-4ac0-498a-b972-e61255833ad0 [ 1291.014544] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9ea71845-7a42-491d-9469-cb93e3e33999 tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.675s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.074833] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52856f1b-ddf3-e647-d5aa-53bc76ca5636, 'name': SearchDatastore_Task, 'duration_secs': 0.009416} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.075206] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1291.075533] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] c621ac90-4619-4e67-9494-a8817744a4be/c621ac90-4619-4e67-9494-a8817744a4be.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1291.075856] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb5ae579-c845-4a63-bb7b-30f5062807c4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.084653] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for the task: (returnval){ [ 1291.084653] env[65503]: value = "task-4451006" [ 1291.084653] env[65503]: _type = "Task" [ 1291.084653] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.094043] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451006, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.180300] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a12979d6-896c-48a9-b196-944236f8632f tempest-AttachVolumeShelveTestJSON-136164908 tempest-AttachVolumeShelveTestJSON-136164908-project-member] Lock "fb2dddac-4ac0-498a-b972-e61255833ad0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.915s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.596258] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451006, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471908} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.596633] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] c621ac90-4619-4e67-9494-a8817744a4be/c621ac90-4619-4e67-9494-a8817744a4be.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1291.596988] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1291.597133] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-358ac77f-cd73-46e4-877d-3cc65161707d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.608964] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for the task: (returnval){ [ 1291.608964] env[65503]: value = "task-4451007" [ 1291.608964] env[65503]: _type = "Task" [ 1291.608964] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.625878] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451007, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.638120] env[65503]: DEBUG nova.compute.manager [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Received event network-changed-40a9564e-b61f-47ad-9d1b-9494f3514527 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1291.638120] env[65503]: DEBUG nova.compute.manager [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Refreshing instance network info cache due to event network-changed-40a9564e-b61f-47ad-9d1b-9494f3514527. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1291.638120] env[65503]: DEBUG oslo_concurrency.lockutils [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] Acquiring lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.638120] env[65503]: DEBUG oslo_concurrency.lockutils [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] Acquired lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1291.638120] env[65503]: DEBUG nova.network.neutron [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Refreshing network info cache for port 40a9564e-b61f-47ad-9d1b-9494f3514527 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1292.119916] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451007, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069751} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.120385] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1292.121217] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e34fd8-e9a1-4c8a-9501-28194c67248a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.145199] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] c621ac90-4619-4e67-9494-a8817744a4be/c621ac90-4619-4e67-9494-a8817744a4be.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1292.145705] env[65503]: WARNING neutronclient.v2_0.client [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
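The CopyVirtualDisk_Task above (task-4451006) moves the cached image VMDK into the instance's own datastore folder; the ExtendVirtualDisk_Task that follows grows the copied disk to the flavor's root size before the ReconfigVM_Task attaches it. The datastore paths follow a simple '<cache>/<image_id>/<image_id>.vmdk' to '<instance_uuid>/<instance_uuid>.vmdk' layout, inferred from the logged strings; a small sketch of building them (the helper itself is illustrative):

    # Build the "[datastore] folder/name.vmdk" paths seen in the
    # CopyVirtualDisk_Task lines above. Values mirror the log.
    def ds_path(datastore, *parts):
        return '[%s] %s' % (datastore, '/'.join(parts))

    image_id = 'd68ffece-ab91-4610-b535-fa1fb25ade93'
    instance_uuid = 'c621ac90-4619-4e67-9494-a8817744a4be'

    source = ds_path('datastore1', 'devstack-image-cache_base',
                     image_id, image_id + '.vmdk')
    dest = ds_path('datastore1', instance_uuid, instance_uuid + '.vmdk')

    print(source)  # [datastore1] devstack-image-cache_base/d68ffece-.../....vmdk
    print(dest)    # [datastore1] c621ac90-.../c621ac90-....vmdk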
[ 1292.146338] env[65503]: WARNING openstack [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1292.146709] env[65503]: WARNING openstack [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1292.153547] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb424679-2cf4-41a2-b4c4-2b88f504a12a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.175575] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for the task: (returnval){ [ 1292.175575] env[65503]: value = "task-4451008" [ 1292.175575] env[65503]: _type = "Task" [ 1292.175575] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.185370] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451008, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.294009] env[65503]: WARNING openstack [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1292.294448] env[65503]: WARNING openstack [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1292.358851] env[65503]: WARNING neutronclient.v2_0.client [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
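For the inventory snapshot reported to placement a little earlier (provider 988ff85a-1d12-41bb-a369-e298e8491ca1), the effective capacity of each resource class is (total - reserved) * allocation_ratio, with max_unit capping what a single allocation may take from this provider. A quick check with the logged values plugged in:

    # Effective capacity = (total - reserved) * allocation_ratio per resource
    # class; values are the ones logged for provider 988ff85a-....
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 96},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: capacity %s, per-allocation cap %s' % (rc, capacity, inv['max_unit']))

    # VCPU: capacity 192.0, per-allocation cap 16
    # MEMORY_MB: capacity 196078.0, per-allocation cap 65530
    # DISK_GB: capacity 200.0, per-allocation cap 96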
[ 1292.359530] env[65503]: WARNING openstack [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1292.359867] env[65503]: WARNING openstack [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1292.497291] env[65503]: DEBUG nova.network.neutron [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Updated VIF entry in instance network info cache for port 40a9564e-b61f-47ad-9d1b-9494f3514527. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1292.497660] env[65503]: DEBUG nova.network.neutron [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Updating instance_info_cache with network_info: [{"id": "40a9564e-b61f-47ad-9d1b-9494f3514527", "address": "fa:16:3e:79:2e:8d", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40a9564e-b6", "ovs_interfaceid": "40a9564e-b61f-47ad-9d1b-9494f3514527", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1292.690030] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451008, 'name': ReconfigVM_Task, 'duration_secs': 0.333432} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.690030] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Reconfigured VM instance instance-00000074 to attach disk [datastore1] c621ac90-4619-4e67-9494-a8817744a4be/c621ac90-4619-4e67-9494-a8817744a4be.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1292.691207] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-636b04f7-48c7-4f29-b86d-6fedb71625fe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.698128] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for the task: (returnval){ [ 1292.698128] env[65503]: value = "task-4451009" [ 1292.698128] env[65503]: _type = "Task" [ 1292.698128] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.708261] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451009, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.001414] env[65503]: DEBUG oslo_concurrency.lockutils [req-ea13a688-7297-4f74-8373-f1d706b3e577 req-7d113cd2-a0b1-4078-8594-46e66a7c0872 service nova] Releasing lock "refresh_cache-d3ca90c9-3dfa-47a5-b48b-67a45ea26021" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1293.145478] env[65503]: DEBUG nova.compute.manager [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Stashing vm_state: active {{(pid=65503) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1293.209280] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451009, 'name': Rename_Task, 'duration_secs': 0.15495} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.209570] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1293.209825] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a90ca93-7611-4c2e-939a-073ad7ab21ab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.217696] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for the task: (returnval){ [ 1293.217696] env[65503]: value = "task-4451011" [ 1293.217696] env[65503]: _type = "Task" [ 1293.217696] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.227104] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451011, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.668809] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.669130] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.672415] env[65503]: DEBUG nova.compute.manager [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Received event network-changed-ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1293.672596] env[65503]: DEBUG nova.compute.manager [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Refreshing instance network info cache due to event network-changed-ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1293.672833] env[65503]: DEBUG oslo_concurrency.lockutils [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] Acquiring lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.672976] env[65503]: DEBUG oslo_concurrency.lockutils [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] Acquired lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1293.673188] env[65503]: DEBUG nova.network.neutron [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Refreshing network info cache for port ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1293.729928] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451011, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.178417] env[65503]: INFO nova.compute.claims [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1294.182303] env[65503]: WARNING neutronclient.v2_0.client [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1294.182972] env[65503]: WARNING openstack [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1294.183337] env[65503]: WARNING openstack [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1294.232697] env[65503]: DEBUG oslo_vmware.api [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451011, 'name': PowerOnVM_Task, 'duration_secs': 0.532434} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.232886] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1294.233135] env[65503]: INFO nova.compute.manager [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Took 7.40 seconds to spawn the instance on the hypervisor. [ 1294.233282] env[65503]: DEBUG nova.compute.manager [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1294.234121] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5b02b2-7224-4b7b-80cf-53a09b533e8b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.318307] env[65503]: WARNING openstack [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1294.318723] env[65503]: WARNING openstack [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1294.387701] env[65503]: WARNING neutronclient.v2_0.client [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1294.388432] env[65503]: WARNING openstack [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1294.388820] env[65503]: WARNING openstack [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1294.471774] env[65503]: DEBUG nova.network.neutron [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updated VIF entry in instance network info cache for port ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1294.472191] env[65503]: DEBUG nova.network.neutron [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance_info_cache with network_info: [{"id": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "address": "fa:16:3e:c7:ab:46", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7c7a8f-f0", "ovs_interfaceid": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1294.693731] env[65503]: INFO nova.compute.resource_tracker [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating resource usage from migration f97b7bb4-b78d-4910-acd7-12967444bdb8 [ 1294.750513] env[65503]: INFO nova.compute.manager [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Took 12.77 seconds to build instance. 
[ 1294.804698] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef28cc4-ba1c-4703-b3d7-2d3f3cb9ebaf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.815614] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e967d268-735c-4754-b913-220ca8762e4e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.848811] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1b1cec-1255-4b3f-8061-a99c52c1833b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.858118] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ae7dc0-ed0d-4516-b816-bc141c66428e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.874605] env[65503]: DEBUG nova.compute.provider_tree [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1294.975546] env[65503]: DEBUG oslo_concurrency.lockutils [req-ef4c51c2-5489-48e3-97fb-d49b6ab8a72d req-a1d3ef6b-d455-4315-9df0-879fc01e5fbb service nova] Releasing lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1295.256344] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1dc01806-3856-4607-991f-0b733a6ba288 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Lock "c621ac90-4619-4e67-9494-a8817744a4be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.281s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1295.378055] env[65503]: DEBUG nova.scheduler.client.report [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1295.882383] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.213s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1295.882683] env[65503]: INFO nova.compute.manager [None 
req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Migrating [ 1296.273780] env[65503]: DEBUG oslo_concurrency.lockutils [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquiring lock "c621ac90-4619-4e67-9494-a8817744a4be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.274070] env[65503]: DEBUG oslo_concurrency.lockutils [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Lock "c621ac90-4619-4e67-9494-a8817744a4be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.274281] env[65503]: DEBUG oslo_concurrency.lockutils [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquiring lock "c621ac90-4619-4e67-9494-a8817744a4be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.274460] env[65503]: DEBUG oslo_concurrency.lockutils [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Lock "c621ac90-4619-4e67-9494-a8817744a4be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.274623] env[65503]: DEBUG oslo_concurrency.lockutils [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Lock "c621ac90-4619-4e67-9494-a8817744a4be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.276751] env[65503]: INFO nova.compute.manager [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Terminating instance [ 1296.397440] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.397749] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.397970] env[65503]: DEBUG nova.network.neutron [None 
req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1296.780885] env[65503]: DEBUG nova.compute.manager [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1296.781132] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1296.782067] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e4baf4-af9c-4f8d-95a6-122499a70d41 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.790697] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1296.790963] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51c4408f-81c4-4752-9fb1-578c5aa03c5f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.799354] env[65503]: DEBUG oslo_vmware.api [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for the task: (returnval){ [ 1296.799354] env[65503]: value = "task-4451012" [ 1296.799354] env[65503]: _type = "Task" [ 1296.799354] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.808429] env[65503]: DEBUG oslo_vmware.api [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451012, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.902257] env[65503]: WARNING neutronclient.v2_0.client [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1296.903792] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1296.904339] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1297.100413] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1297.100815] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1297.161790] env[65503]: WARNING neutronclient.v2_0.client [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1297.162487] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1297.162828] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1297.243614] env[65503]: DEBUG nova.network.neutron [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance_info_cache with network_info: [{"id": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "address": "fa:16:3e:c7:ab:46", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7c7a8f-f0", "ovs_interfaceid": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1297.310892] env[65503]: DEBUG oslo_vmware.api [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451012, 'name': PowerOffVM_Task, 'duration_secs': 0.233143} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.311203] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1297.311371] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1297.311636] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a983727-3540-4c74-8b1b-a223e221e663 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.375274] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1297.375274] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1297.375450] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Deleting the datastore file [datastore1] c621ac90-4619-4e67-9494-a8817744a4be {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1297.375741] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e15b147a-0a6d-4707-94cd-5777445df8f4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.382915] env[65503]: DEBUG oslo_vmware.api [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for the task: (returnval){ [ 1297.382915] env[65503]: value = "task-4451014" [ 1297.382915] env[65503]: _type = "Task" [ 1297.382915] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.392953] env[65503]: DEBUG oslo_vmware.api [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451014, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.747470] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.894577] env[65503]: DEBUG oslo_vmware.api [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Task: {'id': task-4451014, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144994} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.894887] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1297.895107] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1297.895320] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1297.895515] env[65503]: INFO nova.compute.manager [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1297.895778] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1297.896012] env[65503]: DEBUG nova.compute.manager [-] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1297.896114] env[65503]: DEBUG nova.network.neutron [-] [instance: c621ac90-4619-4e67-9494-a8817744a4be] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1297.896334] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1297.896848] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1297.897118] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1297.934625] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1298.184641] env[65503]: DEBUG nova.compute.manager [req-9518c855-cb87-47a1-9fe4-182cc00598c0 req-f7da01cf-181a-4925-ab93-a3ffc607df35 service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Received event network-vif-deleted-ec974d8b-e15e-486d-acbb-c01dfceb2bba {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1298.184832] env[65503]: INFO nova.compute.manager [req-9518c855-cb87-47a1-9fe4-182cc00598c0 req-f7da01cf-181a-4925-ab93-a3ffc607df35 service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Neutron deleted interface ec974d8b-e15e-486d-acbb-c01dfceb2bba; detaching it from the instance and deleting it from the info cache [ 1298.185258] env[65503]: DEBUG nova.network.neutron [req-9518c855-cb87-47a1-9fe4-182cc00598c0 req-f7da01cf-181a-4925-ab93-a3ffc607df35 service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1298.664398] env[65503]: DEBUG nova.network.neutron [-] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1298.688485] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01702e8e-cebc-4cd9-bd7f-4a7af0e3d48f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.701970] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c884d7-ff4a-4d7a-baa4-71d75ff0add1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.731979] env[65503]: DEBUG nova.compute.manager [req-9518c855-cb87-47a1-9fe4-182cc00598c0 req-f7da01cf-181a-4925-ab93-a3ffc607df35 service nova] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Detach interface failed, port_id=ec974d8b-e15e-486d-acbb-c01dfceb2bba, reason: Instance c621ac90-4619-4e67-9494-a8817744a4be could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1299.167987] env[65503]: INFO nova.compute.manager [-] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Took 1.27 seconds to deallocate network for instance. 
[ 1299.262594] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a851f0-2732-4e41-9be5-4e8a100787e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.282430] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance '18508c5b-2830-41fd-b92c-675a6d04e6be' progress to 0 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1299.674897] env[65503]: DEBUG oslo_concurrency.lockutils [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1299.675273] env[65503]: DEBUG oslo_concurrency.lockutils [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.675618] env[65503]: DEBUG nova.objects.instance [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Lazy-loading 'resources' on Instance uuid c621ac90-4619-4e67-9494-a8817744a4be {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1299.788899] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1299.789274] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03eae3b4-781e-4436-88f5-63bc2e3752c1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.797407] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1299.797407] env[65503]: value = "task-4451015" [ 1299.797407] env[65503]: _type = "Task" [ 1299.797407] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.806583] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451015, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.283361] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02357de6-707d-42a1-94f5-8375b31b1953 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.292223] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5300f7-f1de-484f-afcb-6d7ce3e109e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.328817] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d59bfe-0e60-4328-8794-41dfc4c864bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.334504] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451015, 'name': PowerOffVM_Task, 'duration_secs': 0.232064} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.335126] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1300.335504] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance '18508c5b-2830-41fd-b92c-675a6d04e6be' progress to 17 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1300.342032] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367eb909-0043-4180-9b9e-3fc21f68810b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.359712] env[65503]: DEBUG nova.compute.provider_tree [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1300.843971] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1300.844277] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1300.844558] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1300.844637] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1300.844774] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1300.844914] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1300.845127] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1300.845321] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1300.845478] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1300.845634] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1300.845800] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1300.852270] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task 
with opID=oslo.vmware-fee9182a-3f63-40ec-82a7-a8c01c718206 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.863093] env[65503]: DEBUG nova.scheduler.client.report [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1300.872929] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1300.872929] env[65503]: value = "task-4451016" [ 1300.872929] env[65503]: _type = "Task" [ 1300.872929] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.882379] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451016, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.369603] env[65503]: DEBUG oslo_concurrency.lockutils [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.694s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.384474] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451016, 'name': ReconfigVM_Task, 'duration_secs': 0.143464} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.384804] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance '18508c5b-2830-41fd-b92c-675a6d04e6be' progress to 33 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1301.389327] env[65503]: INFO nova.scheduler.client.report [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Deleted allocations for instance c621ac90-4619-4e67-9494-a8817744a4be [ 1301.893229] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1301.893466] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1301.893620] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1301.893800] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1301.893942] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1301.894099] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1301.894306] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1301.894494] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1301.894709] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1301.894873] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1301.895059] env[65503]: DEBUG nova.virt.hardware [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1301.900473] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1301.902671] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f7e11bb-b572-42ba-acc2-7025364f35e1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.916782] env[65503]: DEBUG oslo_concurrency.lockutils [None req-223e129d-1beb-4b7f-bbc0-b39e89d71f68 tempest-ServerTagsTestJSON-977560890 tempest-ServerTagsTestJSON-977560890-project-member] Lock "c621ac90-4619-4e67-9494-a8817744a4be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.643s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.924359] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1301.924359] env[65503]: value = "task-4451017" [ 1301.924359] env[65503]: _type = "Task" [ 1301.924359] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.934271] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451017, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.435574] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451017, 'name': ReconfigVM_Task, 'duration_secs': 0.149231} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.436023] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1302.436693] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffd5d1c-c098-4004-945a-c3662352fb31 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.461240] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] volume-30d1da98-0d01-4336-8a09-38e1c3611707/volume-30d1da98-0d01-4336-8a09-38e1c3611707.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1302.461550] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bf41518-3a17-4b80-b753-83c4bca60ff2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.479970] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1302.479970] env[65503]: value = "task-4451018" [ 1302.479970] env[65503]: _type = "Task" [ 1302.479970] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.488531] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451018, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.990770] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451018, 'name': ReconfigVM_Task, 'duration_secs': 0.245497} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.991023] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Reconfigured VM instance instance-00000073 to attach disk [datastore1] volume-30d1da98-0d01-4336-8a09-38e1c3611707/volume-30d1da98-0d01-4336-8a09-38e1c3611707.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1302.991308] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance '18508c5b-2830-41fd-b92c-675a6d04e6be' progress to 50 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1303.500061] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73800acd-49fd-41cc-a2b9-fbba2d607ee8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.519431] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3d1467-0e34-4212-b5a1-f5c1efc88c08 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.542298] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance '18508c5b-2830-41fd-b92c-675a6d04e6be' progress to 67 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1305.167816] env[65503]: WARNING neutronclient.v2_0.client [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1305.202699] env[65503]: DEBUG nova.network.neutron [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Port ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2 binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 1306.225943] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "18508c5b-2830-41fd-b92c-675a6d04e6be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1306.226300] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1306.226452] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.231902] env[65503]: WARNING neutronclient.v2_0.client [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1307.262655] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.262843] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1307.263040] env[65503]: DEBUG nova.network.neutron [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1307.766360] env[65503]: WARNING neutronclient.v2_0.client [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1307.767097] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1307.767507] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1307.874664] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1307.875154] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1307.933049] env[65503]: WARNING neutronclient.v2_0.client [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 
tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1307.933728] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1307.934082] env[65503]: WARNING openstack [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1308.011810] env[65503]: DEBUG nova.network.neutron [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance_info_cache with network_info: [{"id": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "address": "fa:16:3e:c7:ab:46", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7c7a8f-f0", "ovs_interfaceid": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1308.514552] env[65503]: DEBUG oslo_concurrency.lockutils [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1309.024420] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827d05b0-5a5f-4efe-a62d-3365a1cdda63 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.032156] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-d7dbfd9d-a2e9-4e83-86bb-50cc60438156 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.134771] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2428ba2-9681-4c90-85ee-229c5b543a38 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.154697] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4e5957-99aa-4508-bf7d-ffbd03e30cd0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.162668] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance '18508c5b-2830-41fd-b92c-675a6d04e6be' progress to 83 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1310.668983] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1310.669310] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08c802e2-5c22-47af-8b0d-05da08164f71 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.678399] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1310.678399] env[65503]: value = "task-4451019" [ 1310.678399] env[65503]: _type = "Task" [ 1310.678399] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.686949] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451019, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.189423] env[65503]: DEBUG oslo_vmware.api [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451019, 'name': PowerOnVM_Task, 'duration_secs': 0.371545} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.189806] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1311.189975] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc479b45-905c-407c-9f90-b785bc782d8e tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance '18508c5b-2830-41fd-b92c-675a6d04e6be' progress to 100 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1312.323563] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.324083] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.887093] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "18508c5b-2830-41fd-b92c-675a6d04e6be" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1313.887367] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.887609] env[65503]: DEBUG nova.compute.manager [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Going to confirm migration 8 {{(pid=65503) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 1314.324670] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1314.393346] env[65503]: WARNING neutronclient.v2_0.client [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1314.427563] env[65503]: WARNING neutronclient.v2_0.client [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1314.427954] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.428140] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquired lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1314.428316] env[65503]: DEBUG nova.network.neutron [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1314.428507] env[65503]: DEBUG nova.objects.instance [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lazy-loading 'info_cache' on Instance uuid 18508c5b-2830-41fd-b92c-675a6d04e6be {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1315.323682] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.323933] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1315.434962] env[65503]: WARNING neutronclient.v2_0.client [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1315.435728] env[65503]: WARNING openstack [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1315.436081] env[65503]: WARNING openstack [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1315.545758] env[65503]: WARNING openstack [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1315.546170] env[65503]: WARNING openstack [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1315.625122] env[65503]: WARNING neutronclient.v2_0.client [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1315.625979] env[65503]: WARNING openstack [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1315.626442] env[65503]: WARNING openstack [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1315.706671] env[65503]: DEBUG nova.network.neutron [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance_info_cache with network_info: [{"id": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "address": "fa:16:3e:c7:ab:46", "network": {"id": "c1fee82f-9c02-4c09-800c-e936d95c7c64", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-989745413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521d40776571452e85178972f97c8622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a7ba8d0-0208-4af7-af44-2a5ad382f9be", "external-id": "nsx-vlan-transportzone-742", "segmentation_id": 742, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7c7a8f-f0", "ovs_interfaceid": "ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1316.209518] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Releasing lock "refresh_cache-18508c5b-2830-41fd-b92c-675a6d04e6be" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1316.209740] env[65503]: DEBUG nova.objects.instance [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lazy-loading 'migration_context' on Instance uuid 18508c5b-2830-41fd-b92c-675a6d04e6be {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1316.714870] env[65503]: DEBUG nova.objects.base [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Object 
Instance<18508c5b-2830-41fd-b92c-675a6d04e6be> lazy-loaded attributes: info_cache,migration_context {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1316.715932] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9382452-7420-40be-856c-bb277fd11277 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.737479] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b58632a-9add-4bdf-924a-6e66dabc88e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.743779] env[65503]: DEBUG oslo_vmware.api [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1316.743779] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52487ff8-fbc0-b8d1-1b6e-fae44339af4e" [ 1316.743779] env[65503]: _type = "Task" [ 1316.743779] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.752856] env[65503]: DEBUG oslo_vmware.api [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52487ff8-fbc0-b8d1-1b6e-fae44339af4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.255178] env[65503]: DEBUG oslo_vmware.api [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52487ff8-fbc0-b8d1-1b6e-fae44339af4e, 'name': SearchDatastore_Task, 'duration_secs': 0.009147} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.255478] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.255721] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.841121] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa9ca64-6963-4cb5-9e0a-d079c59ecff4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.849411] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d8c15a-a558-46ad-a353-14f059314c51 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.879657] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4c1c23-f2e4-44a7-a247-f1a8c3c02d68 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.887638] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656aa0c7-602f-4430-a9d5-40ad0d0e8bbc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.903020] env[65503]: DEBUG nova.compute.provider_tree [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.323603] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1318.323866] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1318.406046] env[65503]: DEBUG nova.scheduler.client.report [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1318.826685] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.417574] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.161s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.422434] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.595s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.422694] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.422916] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1319.424282] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6360fa19-b841-436e-a408-b128d176300f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.434271] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7f8ad6-11fa-4acd-9d69-9b24459d2127 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.450624] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d7a10c-8216-4ef4-ad4f-9deeee4fcc5d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.458229] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91119619-c83e-4bad-b154-6847e1e04fad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.489271] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180287MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1319.489435] env[65503]: DEBUG 
oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.489616] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.571851] env[65503]: INFO nova.compute.manager [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Rebuilding instance [ 1319.617111] env[65503]: DEBUG nova.compute.manager [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1319.618009] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da53786-21e4-41ff-b236-55a46cab3fd4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.978211] env[65503]: INFO nova.scheduler.client.report [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted allocation for migration f97b7bb4-b78d-4910-acd7-12967444bdb8 [ 1320.460307] env[65503]: INFO nova.compute.manager [None req-8887f34e-72ae-4950-a458-cf3e1627078c tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Get console output [ 1320.460626] env[65503]: WARNING nova.virt.vmwareapi.driver [None req-8887f34e-72ae-4950-a458-cf3e1627078c tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] The console log is missing. Check your VSPC configuration [ 1320.483811] env[65503]: DEBUG oslo_concurrency.lockutils [None req-95a614a1-ecce-4170-8350-fca3a2ef2b7d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.596s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.514409] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance d3ca90c9-3dfa-47a5-b48b-67a45ea26021 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1320.514564] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1320.514681] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance a1247f14-ebd5-4097-9532-91ddbc9ff8af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1320.514792] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 5babb2a4-c9a1-412c-8fd2-91880037d119 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1320.514903] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 18508c5b-2830-41fd-b92c-675a6d04e6be actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1320.515098] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1320.515239] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=100GB used_disk=4GB total_vcpus=48 used_vcpus=5 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '5', 'num_vm_active': '5', 'num_task_None': '4', 'num_os_type_None': '5', 'num_proj_521d40776571452e85178972f97c8622': '3', 'io_workload': '1', 'num_proj_93906c603f7a4b18a34fc4b42fb6d6c1': '2', 'num_task_rebuilding': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1320.581978] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5c4596-da26-4685-9bc5-4a01416e805f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.590504] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae589d2a-754e-4e14-8f17-372be4c22582 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.621828] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fb005f-392b-43bd-bee7-955ad508469d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.630052] env[65503]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e01a32-a2c8-4e65-bb77-07a6774d0f00 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.634276] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1320.634945] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1e5de4d-4a96-4846-bfb6-a59a9d5f5ff6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.647010] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1320.649603] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1320.649603] env[65503]: value = "task-4451020" [ 1320.649603] env[65503]: _type = "Task" [ 1320.649603] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.659992] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451020, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.151512] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1321.165115] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451020, 'name': PowerOffVM_Task, 'duration_secs': 0.188725} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.165892] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1321.165892] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1321.166584] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da15e07-15a9-44cb-a138-cc0517fccd0e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.174781] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1321.175046] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6d85c54-1274-41f8-88e2-af66cd1865ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.241604] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1321.241771] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1321.241912] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleting the datastore file [datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1321.242208] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e947f0b7-4fd3-4968-982c-4e01cd8881d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.249797] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1321.249797] env[65503]: value = "task-4451022" [ 1321.249797] env[65503]: _type = "Task" [ 1321.249797] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.258434] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451022, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.660620] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1321.661143] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.171s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1321.761933] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139007} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.762161] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1321.762346] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1321.762517] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1322.655070] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.655329] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.808883] env[65503]: DEBUG nova.virt.hardware [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1322.809228] env[65503]: DEBUG nova.virt.hardware [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1322.809297] env[65503]: DEBUG nova.virt.hardware [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1322.809473] env[65503]: DEBUG nova.virt.hardware [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1322.809614] env[65503]: DEBUG nova.virt.hardware [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1322.809772] env[65503]: DEBUG nova.virt.hardware [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1322.809976] env[65503]: DEBUG nova.virt.hardware [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1322.810144] env[65503]: DEBUG nova.virt.hardware [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1322.810309] env[65503]: DEBUG nova.virt.hardware [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1322.810469] env[65503]: DEBUG nova.virt.hardware [None 
req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1322.810638] env[65503]: DEBUG nova.virt.hardware [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1322.811557] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b79006-6b3f-4a81-b047-6f23ce04c32e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.819968] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25b1a62-9aca-43ae-ae61-71424b669cb4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.835964] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:e7:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b331ee4d-cae5-46cf-b647-515091ba1275', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1322.843278] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1322.843531] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1322.843755] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-513fd30c-aa17-438b-97c6-0a857882d99a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.863892] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1322.863892] env[65503]: value = "task-4451023" [ 1322.863892] env[65503]: _type = "Task" [ 1322.863892] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.872507] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451023, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.375062] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451023, 'name': CreateVM_Task, 'duration_secs': 0.297207} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.375062] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1323.375062] env[65503]: WARNING neutronclient.v2_0.client [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 1323.375375] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.375533] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.375893] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1323.376166] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-100ed563-1145-43bf-b9b4-83d977353941 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.380956] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1323.380956] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c1289e-28d5-d927-a3f9-f4e496729485" [ 1323.380956] env[65503]: _type = "Task" [ 1323.380956] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.388595] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c1289e-28d5-d927-a3f9-f4e496729485, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.891357] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c1289e-28d5-d927-a3f9-f4e496729485, 'name': SearchDatastore_Task, 'duration_secs': 0.009198} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.891731] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1323.891896] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1323.892198] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.892347] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.892521] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1323.892798] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b59b308-453f-4084-92c3-bcdb318c6fff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.901686] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1323.901871] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1323.902583] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62e262cb-3b2b-4100-930c-f9bf55677fd9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.908025] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1323.908025] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c7294f-92db-32b2-1247-01a1f8c90939" [ 1323.908025] env[65503]: _type = "Task" [ 1323.908025] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.915725] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c7294f-92db-32b2-1247-01a1f8c90939, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.418841] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c7294f-92db-32b2-1247-01a1f8c90939, 'name': SearchDatastore_Task, 'duration_secs': 0.009293} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.419667] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-492ff32f-ab44-482c-9b65-96b8806a302b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.425933] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1324.425933] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c84975-938a-013d-e81d-45ad9894b8a6" [ 1324.425933] env[65503]: _type = "Task" [ 1324.425933] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.433799] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c84975-938a-013d-e81d-45ad9894b8a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.937123] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52c84975-938a-013d-e81d-45ad9894b8a6, 'name': SearchDatastore_Task, 'duration_secs': 0.01009} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.937497] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1324.937604] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119/5babb2a4-c9a1-412c-8fd2-91880037d119.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1324.937877] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0bed8a9d-eb4b-4aa7-a628-e7ecd29258ea {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.945141] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1324.945141] env[65503]: value = "task-4451024" [ 1324.945141] env[65503]: _type = "Task" [ 1324.945141] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.953605] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451024, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.454924] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.447827} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.455209] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119/5babb2a4-c9a1-412c-8fd2-91880037d119.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1325.455425] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1325.455708] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07eafc03-5b8d-448b-b3db-1623e6da33ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.463085] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1325.463085] env[65503]: value = "task-4451025" [ 1325.463085] env[65503]: _type = "Task" [ 1325.463085] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.471051] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451025, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.974188] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451025, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06346} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.974559] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1325.975283] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2562444-f043-4fc8-89bf-9fbf315be524 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.997897] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119/5babb2a4-c9a1-412c-8fd2-91880037d119.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1325.998194] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fb900b3-d876-45a4-9655-a560c3a66cdc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.020607] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1326.020607] env[65503]: value = "task-4451026" [ 1326.020607] env[65503]: _type = "Task" [ 1326.020607] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.029129] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451026, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.531313] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451026, 'name': ReconfigVM_Task, 'duration_secs': 0.295307} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.531597] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119/5babb2a4-c9a1-412c-8fd2-91880037d119.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1326.532430] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0fe44fcf-fd51-4460-9f32-bb9b0cfe07c4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.539264] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1326.539264] env[65503]: value = "task-4451027" [ 1326.539264] env[65503]: _type = "Task" [ 1326.539264] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.548620] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451027, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.049094] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451027, 'name': Rename_Task, 'duration_secs': 0.1946} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.049461] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1327.049644] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18716066-8aca-431a-b24f-6e4dc324c3a6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.057146] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1327.057146] env[65503]: value = "task-4451028" [ 1327.057146] env[65503]: _type = "Task" [ 1327.057146] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.065576] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451028, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.567955] env[65503]: DEBUG oslo_vmware.api [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451028, 'name': PowerOnVM_Task, 'duration_secs': 0.461685} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.568255] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1327.568460] env[65503]: DEBUG nova.compute.manager [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1327.569324] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2226f8-6a8b-4543-a566-4a81c003d841 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.086185] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1328.086185] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1328.086185] env[65503]: DEBUG nova.objects.instance [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1329.095159] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cf4b9ae5-541b-487b-80dc-a4c5c3087814 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1347.338126] env[65503]: DEBUG oslo_concurrency.lockutils [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "18508c5b-2830-41fd-b92c-675a6d04e6be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1347.338126] env[65503]: DEBUG oslo_concurrency.lockutils [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1347.338570] env[65503]: DEBUG oslo_concurrency.lockutils [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "18508c5b-2830-41fd-b92c-675a6d04e6be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1347.338570] env[65503]: DEBUG oslo_concurrency.lockutils [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1347.338570] env[65503]: DEBUG oslo_concurrency.lockutils [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1347.342199] env[65503]: INFO nova.compute.manager [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Terminating instance [ 1347.846488] env[65503]: DEBUG nova.compute.manager [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1347.846730] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1347.847032] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76fc6d22-2068-45c5-aa7a-b96d942f022b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.854654] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1347.854654] env[65503]: value = "task-4451029" [ 1347.854654] env[65503]: _type = "Task" [ 1347.854654] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.865347] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451029, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.366073] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451029, 'name': PowerOffVM_Task, 'duration_secs': 0.177796} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.366488] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1348.366621] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Volume detach. 
Driver type: vmdk {{(pid=65503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1348.366813] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870516', 'volume_id': '30d1da98-0d01-4336-8a09-38e1c3611707', 'name': 'volume-30d1da98-0d01-4336-8a09-38e1c3611707', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '18508c5b-2830-41fd-b92c-675a6d04e6be', 'attached_at': '2025-11-14T15:59:38.000000', 'detached_at': '', 'volume_id': '30d1da98-0d01-4336-8a09-38e1c3611707', 'serial': '30d1da98-0d01-4336-8a09-38e1c3611707'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1348.367704] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1068936f-ed67-401f-b091-29bf92ddec6a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.389173] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79c4300-71aa-48da-b203-1e8d2dad7402 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.397430] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bcd443-583e-4537-b8e1-71c90e95c29c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.416797] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5042dc9-d86a-485c-baae-6a72d4919962 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.433145] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] The volume has not been displaced from its original location: [datastore1] volume-30d1da98-0d01-4336-8a09-38e1c3611707/volume-30d1da98-0d01-4336-8a09-38e1c3611707.vmdk. No consolidation needed. 
{{(pid=65503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1348.438835] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1348.439244] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d92465e-ffef-40d8-9b11-219b11cfd943 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.459533] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1348.459533] env[65503]: value = "task-4451030" [ 1348.459533] env[65503]: _type = "Task" [ 1348.459533] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.469093] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451030, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.970381] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451030, 'name': ReconfigVM_Task, 'duration_secs': 0.171156} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.970624] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1348.975373] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-424b7f93-bc85-433d-861f-5ff3f6c1e476 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.991481] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1348.991481] env[65503]: value = "task-4451031" [ 1348.991481] env[65503]: _type = "Task" [ 1348.991481] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.000478] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451031, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.501733] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451031, 'name': ReconfigVM_Task, 'duration_secs': 0.147595} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.502145] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-870516', 'volume_id': '30d1da98-0d01-4336-8a09-38e1c3611707', 'name': 'volume-30d1da98-0d01-4336-8a09-38e1c3611707', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '18508c5b-2830-41fd-b92c-675a6d04e6be', 'attached_at': '2025-11-14T15:59:38.000000', 'detached_at': '', 'volume_id': '30d1da98-0d01-4336-8a09-38e1c3611707', 'serial': '30d1da98-0d01-4336-8a09-38e1c3611707'} {{(pid=65503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1349.502345] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1349.503145] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a6c37b-838b-4f46-8be5-0dd1d9dfbf2b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.510415] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1349.510667] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7bcc7ce6-08e4-41e6-ad75-7447e6b555a4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.573410] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1349.573629] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1349.573836] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-63f982e6-66ad-404c-a155-224f2b57c7eb 
tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleting the datastore file [datastore1] 18508c5b-2830-41fd-b92c-675a6d04e6be {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1349.574693] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0021bbb8-5807-4712-b947-ebc33073c133 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.581852] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1349.581852] env[65503]: value = "task-4451033" [ 1349.581852] env[65503]: _type = "Task" [ 1349.581852] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.592152] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451033, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.092827] env[65503]: DEBUG oslo_vmware.api [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451033, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08591} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.093134] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1350.093310] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1350.093484] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1350.093652] env[65503]: INFO nova.compute.manager [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Took 2.25 seconds to destroy the instance on the hypervisor. [ 1350.093937] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1350.094203] env[65503]: DEBUG nova.compute.manager [-] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1350.094203] env[65503]: DEBUG nova.network.neutron [-] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1350.094443] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1350.094960] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1350.095242] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1350.131225] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1350.646863] env[65503]: DEBUG nova.compute.manager [req-b6dcdcce-452f-4ae5-9f09-72563d458e65 req-76b66bdd-f12f-469e-8d81-3da4bcd29fce service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Received event network-vif-deleted-ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1350.647841] env[65503]: INFO nova.compute.manager [req-b6dcdcce-452f-4ae5-9f09-72563d458e65 req-76b66bdd-f12f-469e-8d81-3da4bcd29fce service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Neutron deleted interface ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2; detaching it from the instance and deleting it from the info cache [ 1350.647841] env[65503]: DEBUG nova.network.neutron [req-b6dcdcce-452f-4ae5-9f09-72563d458e65 req-76b66bdd-f12f-469e-8d81-3da4bcd29fce service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1351.119034] env[65503]: DEBUG nova.network.neutron [-] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1351.150487] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93dfb7c3-f5be-45de-b4cd-6b3beb6288e7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.161782] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7381ee7c-7ad5-4526-b0df-82699b7a89b9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.190477] env[65503]: DEBUG nova.compute.manager [req-b6dcdcce-452f-4ae5-9f09-72563d458e65 
req-76b66bdd-f12f-469e-8d81-3da4bcd29fce service nova] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Detach interface failed, port_id=ba7c7a8f-f054-4f99-a4ac-bd060d6d83c2, reason: Instance 18508c5b-2830-41fd-b92c-675a6d04e6be could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1351.621949] env[65503]: INFO nova.compute.manager [-] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Took 1.53 seconds to deallocate network for instance. [ 1352.167771] env[65503]: INFO nova.compute.manager [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Took 0.55 seconds to detach 1 volumes for instance. [ 1352.169994] env[65503]: DEBUG nova.compute.manager [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Deleting volume: 30d1da98-0d01-4336-8a09-38e1c3611707 {{(pid=65503) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3320}} [ 1352.714768] env[65503]: DEBUG oslo_concurrency.lockutils [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1352.715081] env[65503]: DEBUG oslo_concurrency.lockutils [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1352.715314] env[65503]: DEBUG nova.objects.instance [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lazy-loading 'resources' on Instance uuid 18508c5b-2830-41fd-b92c-675a6d04e6be {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1353.299819] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40c8f2d-0fce-481e-a8d0-5c62b46a723c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.307959] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0ed01e-e106-4fe1-9a79-70b4f093627c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.340248] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f346dd4d-13af-4de6-91cf-861267e88495 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.348604] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32598fcb-6dfe-4c1e-b754-06120f3267a7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.363198] env[65503]: DEBUG nova.compute.provider_tree [None 
req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1353.866444] env[65503]: DEBUG nova.scheduler.client.report [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1354.371803] env[65503]: DEBUG oslo_concurrency.lockutils [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.657s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1354.391962] env[65503]: INFO nova.scheduler.client.report [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted allocations for instance 18508c5b-2830-41fd-b92c-675a6d04e6be [ 1354.899401] env[65503]: DEBUG oslo_concurrency.lockutils [None req-63f982e6-66ad-404c-a155-224f2b57c7eb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "18508c5b-2830-41fd-b92c-675a6d04e6be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.561s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1355.130703] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.130960] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.131293] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.131531] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.131741] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1355.134065] env[65503]: INFO nova.compute.manager [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Terminating instance [ 1355.637938] env[65503]: DEBUG nova.compute.manager [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1355.638454] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1355.639378] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b061a87f-d1b3-433c-84c1-93d1938308a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.647873] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1355.648184] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f89f053-fa87-488c-b3d1-c102b7ef8417 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.655286] env[65503]: DEBUG oslo_vmware.api [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1355.655286] env[65503]: value = "task-4451035" [ 1355.655286] env[65503]: _type = "Task" [ 1355.655286] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.665593] env[65503]: DEBUG oslo_vmware.api [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451035, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.166611] env[65503]: DEBUG oslo_vmware.api [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451035, 'name': PowerOffVM_Task, 'duration_secs': 0.201488} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.166874] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1356.167038] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1356.167300] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de720263-faef-4cce-8f0f-97e5b688c698 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.238352] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1356.238656] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1356.238720] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleting the datastore file [datastore2] a1247f14-ebd5-4097-9532-91ddbc9ff8af {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1356.239103] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e13754f2-1a6d-4dfb-9f8e-9092b91598e2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.247513] env[65503]: DEBUG oslo_vmware.api [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1356.247513] env[65503]: value = "task-4451037" [ 1356.247513] 
env[65503]: _type = "Task" [ 1356.247513] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.256641] env[65503]: DEBUG oslo_vmware.api [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451037, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.758023] env[65503]: DEBUG oslo_vmware.api [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451037, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137317} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.758404] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1356.758545] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1356.758651] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1356.758819] env[65503]: INFO nova.compute.manager [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1356.759108] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1356.759312] env[65503]: DEBUG nova.compute.manager [-] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1356.759408] env[65503]: DEBUG nova.network.neutron [-] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1356.759644] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1356.760170] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1356.760457] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1356.819094] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1357.041845] env[65503]: DEBUG nova.compute.manager [req-73565b39-0026-4367-a6ad-1a1336024a66 req-c9833524-e90e-42d1-8e4e-58cf1e41081c service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Received event network-vif-deleted-b9bb3a31-7ee2-4644-8b62-570a11847efa {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1357.042064] env[65503]: INFO nova.compute.manager [req-73565b39-0026-4367-a6ad-1a1336024a66 req-c9833524-e90e-42d1-8e4e-58cf1e41081c service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Neutron deleted interface b9bb3a31-7ee2-4644-8b62-570a11847efa; detaching it from the instance and deleting it from the info cache [ 1357.042237] env[65503]: DEBUG nova.network.neutron [req-73565b39-0026-4367-a6ad-1a1336024a66 req-c9833524-e90e-42d1-8e4e-58cf1e41081c service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1357.524726] env[65503]: DEBUG nova.network.neutron [-] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1357.545154] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24e8f343-0ce3-4529-b59d-9613392e8f73 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.555942] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4b8bb5-6b09-439b-8aa2-ac1d9753ed8b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.583682] env[65503]: DEBUG nova.compute.manager [req-73565b39-0026-4367-a6ad-1a1336024a66 req-c9833524-e90e-42d1-8e4e-58cf1e41081c service nova] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Detach interface failed, port_id=b9bb3a31-7ee2-4644-8b62-570a11847efa, reason: Instance a1247f14-ebd5-4097-9532-91ddbc9ff8af could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1358.028828] env[65503]: INFO nova.compute.manager [-] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Took 1.27 seconds to deallocate network for instance. 
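
Once the network is deallocated, the request goes on to update resource usage under the "compute_resources" lock; the Acquiring/acquired/"released" entries that follow, with their waited and held timings, come from oslo.concurrency's lockutils. A minimal sketch of that pattern is shown below; the lock name matches the log, while the function name and body are stand-ins for illustration.

    # Sketch of the oslo.concurrency locking that emits the
    # 'Acquiring lock ...' / 'Lock ... acquired ... waited N.NNNs' /
    # 'Lock ... "released" ... held N.NNNs' lines above and below.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid):
        # Runs with the in-process "compute_resources" lock held; lockutils
        # logs how long the caller waited for the lock and how long it held it.
        ...
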
[ 1358.535323] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.535602] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.535835] env[65503]: DEBUG nova.objects.instance [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lazy-loading 'resources' on Instance uuid a1247f14-ebd5-4097-9532-91ddbc9ff8af {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1359.108945] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ad6bec-f3e0-4570-ab84-91ab8257e4e2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.118713] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b940b5-8cd3-4435-9a27-0c5194823559 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.149347] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259b32eb-b1a0-4946-af83-4cf06768bf74 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.157065] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1a0a8d-56b4-4c36-bc77-544291558000 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.170651] env[65503]: DEBUG nova.compute.provider_tree [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1359.674191] env[65503]: DEBUG nova.scheduler.client.report [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1360.180119] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a41bf171-6801-4e14-bc57-485cce7850fb 
tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.643s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1360.197355] env[65503]: INFO nova.scheduler.client.report [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted allocations for instance a1247f14-ebd5-4097-9532-91ddbc9ff8af [ 1360.706622] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a41bf171-6801-4e14-bc57-485cce7850fb tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "a1247f14-ebd5-4097-9532-91ddbc9ff8af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.575s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1364.699837] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "5babb2a4-c9a1-412c-8fd2-91880037d119" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1364.700294] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "5babb2a4-c9a1-412c-8fd2-91880037d119" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1364.700371] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "5babb2a4-c9a1-412c-8fd2-91880037d119-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1364.700515] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "5babb2a4-c9a1-412c-8fd2-91880037d119-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1364.700677] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "5babb2a4-c9a1-412c-8fd2-91880037d119-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1364.702822] env[65503]: INFO nova.compute.manager [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 
tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Terminating instance [ 1364.863934] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1364.864245] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1364.864452] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1364.864626] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1364.864804] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1364.866912] env[65503]: INFO nova.compute.manager [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Terminating instance [ 1365.206503] env[65503]: DEBUG nova.compute.manager [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1365.206729] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1365.207579] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4fa4f6-89ba-47db-9e32-e33b232cf9c9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.215947] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1365.216214] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-825e0345-b0c9-49c1-bf87-21043b1f8a8a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.222781] env[65503]: DEBUG oslo_vmware.api [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1365.222781] env[65503]: value = "task-4451039" [ 1365.222781] env[65503]: _type = "Task" [ 1365.222781] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.231582] env[65503]: DEBUG oslo_vmware.api [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451039, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.371029] env[65503]: DEBUG nova.compute.manager [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1365.371316] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1365.372310] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a56f17a-1ed0-4f42-bfc3-2852140b3a00 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.381047] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1365.381364] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35698787-da4a-4286-afd7-99981c09b087 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.389727] env[65503]: DEBUG oslo_vmware.api [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1365.389727] env[65503]: value = "task-4451040" [ 1365.389727] env[65503]: _type = "Task" [ 1365.389727] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.401549] env[65503]: DEBUG oslo_vmware.api [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451040, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.733072] env[65503]: DEBUG oslo_vmware.api [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451039, 'name': PowerOffVM_Task, 'duration_secs': 0.194219} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.733072] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1365.733512] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1365.733512] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a929716-4d43-47ea-94c4-8ded040cf383 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.794293] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1365.794521] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1365.794637] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleting the datastore file [datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1365.794920] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2583397-d2eb-4f47-9545-0c602205495a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.803102] env[65503]: DEBUG oslo_vmware.api [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1365.803102] env[65503]: value = "task-4451042" [ 1365.803102] env[65503]: _type = "Task" [ 1365.803102] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.811719] env[65503]: DEBUG oslo_vmware.api [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451042, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.900483] env[65503]: DEBUG oslo_vmware.api [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451040, 'name': PowerOffVM_Task, 'duration_secs': 0.162519} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.900775] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1365.900923] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1365.901208] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e6dad48-3e00-4398-848f-48284bc6e121 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.963062] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1365.963388] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1365.963588] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleting the datastore file [datastore2] d3ca90c9-3dfa-47a5-b48b-67a45ea26021 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1365.963874] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e07f8e1-058e-4e9c-9e36-b022bd00d395 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.971487] env[65503]: DEBUG oslo_vmware.api [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for the task: (returnval){ [ 1365.971487] env[65503]: value = "task-4451044" [ 1365.971487] env[65503]: _type = "Task" [ 1365.971487] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.979639] env[65503]: DEBUG oslo_vmware.api [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451044, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.314951] env[65503]: DEBUG oslo_vmware.api [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451042, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140526} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.315298] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1366.315530] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1366.315731] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1366.315954] env[65503]: INFO nova.compute.manager [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1366.316265] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1366.316510] env[65503]: DEBUG nova.compute.manager [-] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1366.316610] env[65503]: DEBUG nova.network.neutron [-] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1366.316921] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
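
The DeleteDatastoreFile_Task invocations above remove the instance directories by datastore path ('[datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119' and '[datastore2] d3ca90c9-3dfa-47a5-b48b-67a45ea26021'). Sketched against oslo.vmware, with the session and datacenter reference treated as assumed inputs rather than anything read from this log, the call is roughly:

    # Sketch only, not Nova's actual helper: delete an instance directory on a
    # datastore via FileManager.DeleteDatastoreFile_Task. `session` is an
    # oslo.vmware VMwareAPISession, `dc_ref` an assumed datacenter reference.
    def delete_instance_dir(session, dc_ref, ds_path):
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task', file_manager,
            name=ds_path,        # e.g. '[datastore1] 5babb2a4-c9a1-412c-8fd2-91880037d119'
            datacenter=dc_ref)
        session.wait_for_task(task)  # blocks, polling like the _poll_task entries above
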
[ 1366.317687] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1366.318111] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1366.355362] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1366.483430] env[65503]: DEBUG oslo_vmware.api [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Task: {'id': task-4451044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127075} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.483672] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1366.483850] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1366.484014] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1366.484185] env[65503]: INFO nova.compute.manager [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1366.484418] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1366.484607] env[65503]: DEBUG nova.compute.manager [-] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1366.484697] env[65503]: DEBUG nova.network.neutron [-] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1366.484953] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1366.485455] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1366.485703] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1366.521367] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1366.763489] env[65503]: DEBUG nova.compute.manager [req-cbc8cc7a-cf98-4748-b6ea-76590b8de12c req-1c832516-d7e9-4b85-aa62-afe4746efb0e service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Received event network-vif-deleted-40a9564e-b61f-47ad-9d1b-9494f3514527 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1366.763753] env[65503]: INFO nova.compute.manager [req-cbc8cc7a-cf98-4748-b6ea-76590b8de12c req-1c832516-d7e9-4b85-aa62-afe4746efb0e service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Neutron deleted interface 40a9564e-b61f-47ad-9d1b-9494f3514527; detaching it from the instance and deleting it from the info cache [ 1366.763908] env[65503]: DEBUG nova.network.neutron [req-cbc8cc7a-cf98-4748-b6ea-76590b8de12c req-1c832516-d7e9-4b85-aa62-afe4746efb0e service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1366.843191] env[65503]: DEBUG nova.compute.manager [req-21e48c2a-50c3-43e6-840f-a047b93a9214 req-e6f167c5-261b-4ff0-a58d-d0f19e49b01c service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Received event network-vif-deleted-b331ee4d-cae5-46cf-b647-515091ba1275 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1366.843191] env[65503]: INFO nova.compute.manager [req-21e48c2a-50c3-43e6-840f-a047b93a9214 req-e6f167c5-261b-4ff0-a58d-d0f19e49b01c service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Neutron deleted interface b331ee4d-cae5-46cf-b647-515091ba1275; detaching it from the instance and deleting it from the info cache [ 1366.843191] env[65503]: DEBUG nova.network.neutron [req-21e48c2a-50c3-43e6-840f-a047b93a9214 req-e6f167c5-261b-4ff0-a58d-d0f19e49b01c service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] 
Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1367.236745] env[65503]: DEBUG nova.network.neutron [-] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1367.266870] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b79ef78-7413-4e9f-afbe-541cb29a5a03 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.276637] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4add0be5-5954-4136-84f6-6f7f4cb2f3d7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.301904] env[65503]: DEBUG nova.compute.manager [req-cbc8cc7a-cf98-4748-b6ea-76590b8de12c req-1c832516-d7e9-4b85-aa62-afe4746efb0e service nova] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Detach interface failed, port_id=40a9564e-b61f-47ad-9d1b-9494f3514527, reason: Instance d3ca90c9-3dfa-47a5-b48b-67a45ea26021 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1367.321052] env[65503]: DEBUG nova.network.neutron [-] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1367.346752] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21d3e238-e54c-4e62-9ed4-2abf7902134c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.358300] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44eebed-a5f3-46fa-bd4c-45e9015f23b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.383986] env[65503]: DEBUG nova.compute.manager [req-21e48c2a-50c3-43e6-840f-a047b93a9214 req-e6f167c5-261b-4ff0-a58d-d0f19e49b01c service nova] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Detach interface failed, port_id=b331ee4d-cae5-46cf-b647-515091ba1275, reason: Instance 5babb2a4-c9a1-412c-8fd2-91880037d119 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1367.739926] env[65503]: INFO nova.compute.manager [-] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Took 1.25 seconds to deallocate network for instance. [ 1367.824000] env[65503]: INFO nova.compute.manager [-] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Took 1.51 seconds to deallocate network for instance. 
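
With the networks gone, each request then updates the resource tracker and re-reports inventory to placement, which is where the repeated 'Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1' entries below come from. Usable capacity per resource class is derived as (total - reserved) * allocation_ratio; the short calculation below applies that formula to the inventory values in this log (the formula reflects standard placement behaviour, the snippet itself is only illustrative).

    # Capacity implied by the inventory reported below:
    # usable = (total - reserved) * allocation_ratio per resource class.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, usable)  # VCPU 192, MEMORY_MB 196078, DISK_GB 200
    # max_unit (16 VCPU, 65530 MB, 96 GB) caps what any single allocation may claim.
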
[ 1368.247652] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1368.247915] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1368.248154] env[65503]: DEBUG nova.objects.instance [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lazy-loading 'resources' on Instance uuid d3ca90c9-3dfa-47a5-b48b-67a45ea26021 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1368.330738] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1368.805785] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5b2520-4930-4677-aea6-441f61580cae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.814423] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa7bdc2-070b-4a64-84d6-8757eeffc18d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.844985] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64213b34-b80b-4f30-9a60-95b3b232bed9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.853748] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c638455-b3f6-4941-81ee-b565a8c56f83 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.869030] env[65503]: DEBUG nova.compute.provider_tree [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1369.372336] env[65503]: DEBUG nova.scheduler.client.report [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1369.878243] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.630s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1369.880631] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.550s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1369.880867] env[65503]: DEBUG nova.objects.instance [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'resources' on Instance uuid 5babb2a4-c9a1-412c-8fd2-91880037d119 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1369.896873] env[65503]: INFO nova.scheduler.client.report [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Deleted allocations for instance d3ca90c9-3dfa-47a5-b48b-67a45ea26021 [ 1370.402649] env[65503]: DEBUG nova.scheduler.client.report [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Refreshing inventories for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1370.408263] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bb1ba72d-8603-4d16-b56b-3cdc15c93d2d tempest-ServerActionsTestOtherA-1976804656 tempest-ServerActionsTestOtherA-1976804656-project-member] Lock "d3ca90c9-3dfa-47a5-b48b-67a45ea26021" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.544s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1370.426344] env[65503]: DEBUG nova.scheduler.client.report [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Updating ProviderTree inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1370.426612] env[65503]: DEBUG nova.compute.provider_tree [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 
tempest-ServerActionsTestJSON-130133197-project-member] Updating inventory in ProviderTree for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1370.439319] env[65503]: DEBUG nova.scheduler.client.report [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Refreshing aggregate associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, aggregates: None {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1370.458540] env[65503]: DEBUG nova.scheduler.client.report [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Refreshing trait associations for resource provider 988ff85a-1d12-41bb-a369-e298e8491ca1, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=65503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1370.494689] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25541808-b5d0-4f55-9e79-4ce9b107f203 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.502710] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a2c547-9708-4776-9b3d-b9e20c03979f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.533733] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9deff772-cd17-4664-821d-98c71e68f2c0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.541918] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0e2763-c710-45ec-bb28-115a900f33eb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.557020] env[65503]: DEBUG nova.compute.provider_tree [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1371.060736] env[65503]: DEBUG nova.scheduler.client.report [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1371.566242] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.685s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1371.595320] env[65503]: INFO nova.scheduler.client.report [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleted allocations for instance 5babb2a4-c9a1-412c-8fd2-91880037d119 [ 1372.104293] env[65503]: DEBUG oslo_concurrency.lockutils [None req-6b2134ea-5e82-40eb-8e1b-c1f2f2e9bdca tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "5babb2a4-c9a1-412c-8fd2-91880037d119" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.404s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.323643] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.323957] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.846543] env[65503]: DEBUG nova.compute.manager [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Stashing vm_state: active {{(pid=65503) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1374.366601] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1374.366973] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.440628] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "5032f46e-8185-4181-a2d6-b359abddd1ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1374.441055] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "5032f46e-8185-4181-a2d6-b359abddd1ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.596364] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "261f8d40-439d-4469-b132-cf0448841057" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1374.596635] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "261f8d40-439d-4469-b132-cf0448841057" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.871846] env[65503]: INFO nova.compute.claims [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1374.943263] env[65503]: DEBUG nova.compute.manager [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1375.100053] env[65503]: DEBUG nova.compute.manager [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1375.324140] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1375.378328] env[65503]: INFO nova.compute.resource_tracker [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating resource usage from migration c9db3d12-fecc-4120-9c38-1374c01543c6 [ 1375.454547] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee490fa-0843-4db1-be3d-818abb950a00 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.462526] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1375.463551] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155625a4-8782-4ba9-82e9-1d5c828e1fad {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.494218] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254ff55d-c61c-407b-9d2f-496d011e349a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.502482] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477dda6d-b9d9-47fd-961c-2aca94a08d97 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.516204] env[65503]: DEBUG nova.compute.provider_tree [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1375.618657] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.019783] env[65503]: DEBUG nova.scheduler.client.report [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1376.525495] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.158s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.525842] env[65503]: INFO nova.compute.manager [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Migrating [ 1376.532705] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.070s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.534671] env[65503]: INFO nova.compute.claims [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1377.045415] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.045633] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1377.045723] env[65503]: DEBUG nova.network.neutron [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1377.324117] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1377.324287] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1377.548943] env[65503]: WARNING neutronclient.v2_0.client [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1377.549612] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1377.549696] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1377.617699] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc88ca5f-06aa-4a75-a867-7a79d6e88772 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.626825] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f500ff4-be3a-4076-b4ef-58c7e81dbd07 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.663297] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2678b2ed-c521-444e-913d-22d70f507520 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.672296] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74f4886-26e6-459b-94e3-460d24a28e1a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.687018] env[65503]: DEBUG nova.compute.provider_tree [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1377.710595] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1377.711059] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to 
process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1377.771168] env[65503]: WARNING neutronclient.v2_0.client [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1377.771848] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1377.772203] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1377.850796] env[65503]: DEBUG nova.network.neutron [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance_info_cache with network_info: [{"id": "038e2362-a15e-4f40-bbd0-5289c014118b", "address": "fa:16:3e:12:d8:4b", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038e2362-a1", "ovs_interfaceid": "038e2362-a15e-4f40-bbd0-5289c014118b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1378.190423] env[65503]: DEBUG nova.scheduler.client.report [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1378.354105] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1378.696992] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.163s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1378.696992] env[65503]: DEBUG nova.compute.manager [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1378.699382] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.081s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1378.700803] env[65503]: INFO nova.compute.claims [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1379.206414] env[65503]: DEBUG nova.compute.utils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1379.210755] env[65503]: DEBUG nova.compute.manager [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Not allocating networking since 'none' was specified. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 1379.318172] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1379.323937] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1379.712023] env[65503]: DEBUG nova.compute.manager [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1379.783428] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b6c8e0-60a4-4a20-83cd-4f83bfc5d5bd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.791898] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdf6602-e1a3-4914-ab2b-5f633626c212 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.822914] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25acc9f7-d938-4b2c-9b23-acb5410ebdfb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.830771] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d669e5-65e6-42b6-8223-0d405f7dda38 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.845446] env[65503]: DEBUG nova.compute.provider_tree [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1379.869102] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8576df94-63d9-49cb-8dce-14bb47240b9b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.888364] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance '0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7' progress to 0 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1380.317973] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.348982] env[65503]: DEBUG 
nova.scheduler.client.report [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1380.394767] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1380.395130] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2100ecba-b6cf-491f-8d76-7fa2ba0192bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.403136] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1380.403136] env[65503]: value = "task-4451045" [ 1380.403136] env[65503]: _type = "Task" [ 1380.403136] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.412457] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451045, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.725060] env[65503]: DEBUG nova.compute.manager [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1380.750800] env[65503]: DEBUG nova.virt.hardware [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1380.751063] env[65503]: DEBUG nova.virt.hardware [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1380.751223] env[65503]: DEBUG nova.virt.hardware [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1380.751409] env[65503]: DEBUG nova.virt.hardware [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1380.751550] env[65503]: DEBUG nova.virt.hardware [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1380.751690] env[65503]: DEBUG nova.virt.hardware [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1380.751894] env[65503]: DEBUG nova.virt.hardware [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1380.752072] env[65503]: DEBUG nova.virt.hardware [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1380.752304] env[65503]: DEBUG nova.virt.hardware [None req-5a72d512-443d-4db9-9e12-2452fd420eeb 
tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1380.752477] env[65503]: DEBUG nova.virt.hardware [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1380.752648] env[65503]: DEBUG nova.virt.hardware [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1380.753550] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8939b5-1ac7-44cd-b7ff-5c3d41b59626 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.762274] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ddc111-22a3-4b2b-a013-6560e8551265 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.776300] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1380.781862] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Creating folder: Project (f4ffd2fcbb4b470fa7480ff8abc5c12c). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.782160] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b032967-cace-45f3-bc85-4460dc922558 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.793595] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Created folder: Project (f4ffd2fcbb4b470fa7480ff8abc5c12c) in parent group-v870190. [ 1380.793780] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Creating folder: Instances. Parent ref: group-v870523. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.794029] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d5c9efc-5ba2-4e07-a77b-b6c576957669 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.803520] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Created folder: Instances in parent group-v870523. 
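The "Acquiring lock …", "Lock … acquired … waited N.NNNs" and "Lock … released … held N.NNNs" lines in the surrounding records are emitted by oslo.concurrency's lock wrapper (the lockutils.py:405/410/424 call sites cited in the records), and the "Waiting for the task: (returnval){ value = "task-…" } to complete" blocks with their "progress is N%" follow-ups come from oslo.vmware's task polling (api.py:397/434/444). A minimal sketch of the two calling patterns that produce this output, assuming an already-constructed oslo_vmware.api.VMwareAPISession and using simplified, illustrative function names and arguments (claim_resources, create_vm, folder_ref, config_spec, respool_ref) rather than Nova's actual helpers:

    from oslo_concurrency import lockutils

    # Decorating a function with lockutils.synchronized() produces the paired
    # "Acquiring lock" / "acquired ... waited" / "released ... held" DEBUG lines
    # seen above; the "held" figure is how long the decorated body ran.
    @lockutils.synchronized('compute_resources')
    def claim_resources(tracker, context, instance, nodename):
        # Critical section guarded by the "compute_resources" semaphore.
        return tracker.instance_claim(context, instance, nodename)

    # Asynchronous vSphere calls return a task reference; wait_for_task() polls
    # it (the "_poll_task ... progress is N%" records) until it succeeds or fails.
    def create_vm(session, folder_ref, config_spec, respool_ref):
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                      config=config_spec, pool=respool_ref)
        task_info = session.wait_for_task(task_ref)  # blocks while polling
        return task_info.result                      # managed object ref of the new VM

In the records that follow, the same CreateVM_Task wait is additionally driven through an oslo.service looping call ("Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return"), which is consistent with the task-polling lines there carrying the bare "[-]" request context.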
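The recurring "Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {…}" records above show the inventory dict the report client compares against what it last wrote to Placement; an update is only sent when the values differ. For reference, the same figures in the shape the Placement API expects for PUT /resource_providers/{uuid}/inventories; the generation value and the commented request are illustrative only, since Nova drives this through its SchedulerReportClient rather than calling the API by hand:

    # Inventory for provider 988ff85a-1d12-41bb-a369-e298e8491ca1, taken from the
    # "based on inventory data" records above.
    inventory_payload = {
        'resource_provider_generation': 42,  # must match the provider's current generation (illustrative value)
        'inventories': {
            'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                          'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
            'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                          'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
            'DISK_GB':   {'total': 200,    'reserved': 0,   'min_unit': 1,
                          'max_unit': 96,    'step_size': 1, 'allocation_ratio': 1.0},
        },
    }
    # e.g., against a Placement endpoint and token obtained elsewhere (illustrative):
    # requests.put(placement_url + '/resource_providers/' + provider_uuid + '/inventories',
    #              json=inventory_payload, headers={'X-Auth-Token': token})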
[ 1380.803887] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1380.804198] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1380.804518] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f35e577-11ae-4acc-94f5-60c407ba0b7e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.824409] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.825747] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1380.825747] env[65503]: value = "task-4451048" [ 1380.825747] env[65503]: _type = "Task" [ 1380.825747] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.835635] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451048, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.854958] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.155s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.855518] env[65503]: DEBUG nova.compute.manager [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1380.918016] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451045, 'name': PowerOffVM_Task, 'duration_secs': 0.177032} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.918450] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1380.918755] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance '0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7' progress to 17 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1381.327802] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1381.328099] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1381.328323] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1381.328500] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1381.329548] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cc553a-fe33-4c1e-af47-d25078d3d297 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.340939] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451048, 'name': CreateVM_Task, 'duration_secs': 0.259519} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.342959] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1381.343482] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.343643] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1381.343973] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1381.345219] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a5799d-65e6-45bf-9b7f-39609d800af9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.349076] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ed8f86e-92b0-4a47-8625-77e79da9f16d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.355692] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1381.355692] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e4aab2-c760-adf7-83fd-812b22f13246" [ 1381.355692] env[65503]: _type = "Task" [ 1381.355692] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.365116] env[65503]: DEBUG nova.compute.utils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1381.370679] env[65503]: DEBUG nova.compute.manager [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Not allocating networking since 'none' was specified. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 1381.371566] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b91b99-43f9-497e-8d84-63623a519eb4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.383295] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52e4aab2-c760-adf7-83fd-812b22f13246, 'name': SearchDatastore_Task, 'duration_secs': 0.014427} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.383661] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1381.383886] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1381.384128] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.384289] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1381.384467] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1381.385445] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3dbceac-6cfe-4f4d-b6e2-8f69818f4806 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.388688] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af9d95a8-d085-4e7e-a75b-df5535c46e7b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.419616] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
free_ram=180130MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1381.419851] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1381.420057] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1381.424284] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1381.424510] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1381.427445] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1381.427696] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1381.427839] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1381.428051] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1381.428234] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] 
Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1381.428408] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1381.428640] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1381.428802] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1381.428987] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1381.429167] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1381.429423] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1381.434692] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e72c1d92-32a7-4b7f-a4c2-83519fb4bc8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.437620] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27b2a69e-6031-40b2-8aea-a63a20a75afb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.457142] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1381.457142] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529259b7-a41d-a410-15ed-ddc1679d1eac" [ 1381.457142] env[65503]: _type = "Task" [ 1381.457142] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.459330] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1381.459330] env[65503]: value = "task-4451049" [ 1381.459330] env[65503]: _type = "Task" [ 1381.459330] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.473218] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529259b7-a41d-a410-15ed-ddc1679d1eac, 'name': SearchDatastore_Task, 'duration_secs': 0.010995} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.477331] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451049, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.477582] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd2cd586-1d3c-45cf-959a-8add49c30598 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.484022] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1381.484022] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52191551-d12c-9c70-b1e4-eaad3eb390ab" [ 1381.484022] env[65503]: _type = "Task" [ 1381.484022] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.494228] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52191551-d12c-9c70-b1e4-eaad3eb390ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.868431] env[65503]: DEBUG nova.compute.manager [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1381.973528] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451049, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.993858] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52191551-d12c-9c70-b1e4-eaad3eb390ab, 'name': SearchDatastore_Task, 'duration_secs': 0.015542} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.994111] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1381.994369] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 5032f46e-8185-4181-a2d6-b359abddd1ad/5032f46e-8185-4181-a2d6-b359abddd1ad.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1381.994628] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac819674-9e6c-402d-8942-124802f4d066 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.002189] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1382.002189] env[65503]: value = "task-4451050" [ 1382.002189] env[65503]: _type = "Task" [ 1382.002189] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.009943] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451050, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.430505] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Applying migration context for instance 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 as it has an incoming, in-progress migration c9db3d12-fecc-4120-9c38-1374c01543c6. Migration status is migrating {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 1382.431426] env[65503]: INFO nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating resource usage from migration c9db3d12-fecc-4120-9c38-1374c01543c6 [ 1382.449407] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Migration c9db3d12-fecc-4120-9c38-1374c01543c6 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1745}} [ 1382.449661] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1382.449822] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 5032f46e-8185-4181-a2d6-b359abddd1ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1382.450009] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 261f8d40-439d-4469-b132-cf0448841057 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1382.450544] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1382.450544] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1344MB phys_disk=100GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '3', 'num_vm_active': '1', 'num_task_resize_migrating': '1', 'num_os_type_None': '3', 'num_proj_93906c603f7a4b18a34fc4b42fb6d6c1': '1', 'io_workload': '3', 'num_vm_building': '2', 'num_task_spawning': '1', 'num_proj_f4ffd2fcbb4b470fa7480ff8abc5c12c': '2', 'num_task_networking': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1382.476162] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451049, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.509578] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82d979e-0a6c-413b-8481-cd237844b65b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.515154] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.435126} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.515747] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 5032f46e-8185-4181-a2d6-b359abddd1ad/5032f46e-8185-4181-a2d6-b359abddd1ad.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1382.515977] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1382.516197] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d3a6648-e1bb-47ce-818e-c6ae59a39f21 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.520549] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ca6ec5-7252-4abb-8671-1b1a38b9b257 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.525134] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1382.525134] env[65503]: value = "task-4451051" [ 1382.525134] env[65503]: _type = "Task" [ 1382.525134] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.554026] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f34e25-75d4-4327-94dd-58b2ef2016e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.559566] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451051, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.564805] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f613257-236c-445b-94d3-49a8a1c22175 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.578624] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1382.879113] env[65503]: DEBUG nova.compute.manager [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1382.900373] env[65503]: DEBUG nova.virt.hardware [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1382.900626] env[65503]: DEBUG nova.virt.hardware [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1382.900776] env[65503]: DEBUG nova.virt.hardware [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1382.900953] env[65503]: DEBUG nova.virt.hardware [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1382.901108] env[65503]: DEBUG nova.virt.hardware [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1382.901254] env[65503]: DEBUG nova.virt.hardware [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1382.901459] env[65503]: DEBUG nova.virt.hardware [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1382.901613] env[65503]: DEBUG nova.virt.hardware [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1382.901772] env[65503]: DEBUG nova.virt.hardware [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 
tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1382.901926] env[65503]: DEBUG nova.virt.hardware [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1382.902106] env[65503]: DEBUG nova.virt.hardware [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1382.902964] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbf4721-cb81-4674-b548-5e1b89d8be08 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.911322] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfe503f-b7fb-4bec-bbfc-f103a5f30089 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.925677] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1382.931265] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1382.931520] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 261f8d40-439d-4469-b132-cf0448841057] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1382.931739] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44a93f33-20b8-4e5e-a123-03a028af70e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.949708] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1382.949708] env[65503]: value = "task-4451052" [ 1382.949708] env[65503]: _type = "Task" [ 1382.949708] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.957884] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451052, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.973578] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451049, 'name': ReconfigVM_Task, 'duration_secs': 1.188736} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.973909] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance '0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7' progress to 33 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1383.035893] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451051, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127485} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.036167] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1383.037019] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8665396-7ee1-4e56-b435-b3bdce8d608f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.057582] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] 5032f46e-8185-4181-a2d6-b359abddd1ad/5032f46e-8185-4181-a2d6-b359abddd1ad.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1383.057864] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de913703-1581-498a-ae15-381b1d86d1d3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.077738] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1383.077738] env[65503]: value = "task-4451053" [ 1383.077738] env[65503]: _type = "Task" [ 1383.077738] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.083129] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1383.091858] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451053, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.460393] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451052, 'name': CreateVM_Task, 'duration_secs': 0.314705} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.460610] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 261f8d40-439d-4469-b132-cf0448841057] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1383.460977] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.461157] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1383.461517] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1383.461782] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a31aa924-63aa-47be-883e-b96b121e9255 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.467187] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1383.467187] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5215537d-b7b7-9f59-ec1f-9d5a500eebbb" [ 1383.467187] env[65503]: _type = "Task" [ 1383.467187] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.475574] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5215537d-b7b7-9f59-ec1f-9d5a500eebbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.479907] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1383.480137] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1383.480288] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1383.480467] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1383.480636] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1383.480783] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1383.480983] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1383.481204] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce 
tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1383.481301] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1383.481453] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1383.481637] env[65503]: DEBUG nova.virt.hardware [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1383.486668] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1383.486923] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbb3d917-09d7-4c12-977d-c0265d6e5c39 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.506561] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1383.506561] env[65503]: value = "task-4451054" [ 1383.506561] env[65503]: _type = "Task" [ 1383.506561] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.514894] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451054, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.588443] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1383.588703] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.169s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1383.588908] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451053, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.978759] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5215537d-b7b7-9f59-ec1f-9d5a500eebbb, 'name': SearchDatastore_Task, 'duration_secs': 0.010472} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.979118] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1383.979301] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1383.979534] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.979675] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1383.979847] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1383.980132] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88107f01-0be3-4a16-883f-d4b33ce38b5c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.988818] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1383.989010] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1383.989717] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-528b2b66-04c1-4685-8676-1dedcaec1b8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.995125] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1383.995125] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]526c9cc8-1103-85fc-4557-7a861d9d359b" [ 1383.995125] env[65503]: _type = "Task" [ 1383.995125] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.003254] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526c9cc8-1103-85fc-4557-7a861d9d359b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.015021] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451054, 'name': ReconfigVM_Task, 'duration_secs': 0.167579} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.015284] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1384.016034] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db519d1-2900-46b0-99d9-6b88569fdeb1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.039239] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7/0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1384.039655] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-272f8132-a830-4630-b914-99e984687a9a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.062445] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1384.062445] env[65503]: value = "task-4451055" [ 1384.062445] env[65503]: _type = "Task" [ 1384.062445] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.072151] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451055, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.088954] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1384.089258] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451053, 'name': ReconfigVM_Task, 'duration_secs': 0.633324} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.089676] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Reconfigured VM instance instance-00000075 to attach disk [datastore2] 5032f46e-8185-4181-a2d6-b359abddd1ad/5032f46e-8185-4181-a2d6-b359abddd1ad.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1384.090329] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bb398b9-5b65-47b2-95d6-f3e49a71cfb2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.097795] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1384.097795] env[65503]: value = "task-4451056" [ 1384.097795] env[65503]: _type = "Task" [ 1384.097795] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.106537] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451056, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.507119] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]526c9cc8-1103-85fc-4557-7a861d9d359b, 'name': SearchDatastore_Task, 'duration_secs': 0.009071} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.507900] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c242a6c-c1f1-4a62-8d28-81fa5f44db67 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.513636] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1384.513636] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ec5874-3705-394a-a5c5-6ffbaa21146f" [ 1384.513636] env[65503]: _type = "Task" [ 1384.513636] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.522448] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ec5874-3705-394a-a5c5-6ffbaa21146f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.573015] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451055, 'name': ReconfigVM_Task, 'duration_secs': 0.287935} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.573330] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7/0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1384.573621] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance '0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7' progress to 50 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1384.609996] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451056, 'name': Rename_Task, 'duration_secs': 0.128827} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.609996] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1384.610235] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c07e352-3afc-4aa8-b3e6-a77ca5185efb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.621537] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1384.621537] env[65503]: value = "task-4451057" [ 1384.621537] env[65503]: _type = "Task" [ 1384.621537] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.630440] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451057, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.025429] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52ec5874-3705-394a-a5c5-6ffbaa21146f, 'name': SearchDatastore_Task, 'duration_secs': 0.010424} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.025856] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1385.025960] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 261f8d40-439d-4469-b132-cf0448841057/261f8d40-439d-4469-b132-cf0448841057.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1385.026292] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e68588e1-6509-4c98-bb4f-b6360f3f2ccf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.034439] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1385.034439] env[65503]: value = "task-4451058" [ 1385.034439] env[65503]: _type = "Task" [ 1385.034439] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.043545] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451058, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.081127] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1a874e-20bb-4022-834a-75590a061a00 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.100760] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d509e425-a4f4-4fb6-8503-542e0d4f350a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.118828] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance '0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7' progress to 67 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1385.130913] env[65503]: DEBUG oslo_vmware.api [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451057, 'name': PowerOnVM_Task, 'duration_secs': 0.414776} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.131192] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1385.131390] env[65503]: INFO nova.compute.manager [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Took 4.41 seconds to spawn the instance on the hypervisor. [ 1385.131563] env[65503]: DEBUG nova.compute.manager [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1385.132331] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125b6e5a-148f-45e8-b24d-a3e4731935cd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.547509] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451058, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441941} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.547723] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 261f8d40-439d-4469-b132-cf0448841057/261f8d40-439d-4469-b132-cf0448841057.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1385.547925] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1385.548198] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22edc932-3dd6-4931-9be8-a5df6587ce87 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.555602] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1385.555602] env[65503]: value = "task-4451059" [ 1385.555602] env[65503]: _type = "Task" [ 1385.555602] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.565492] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451059, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.627803] env[65503]: WARNING neutronclient.v2_0.client [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1385.648712] env[65503]: INFO nova.compute.manager [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Took 10.20 seconds to build instance. [ 1385.671653] env[65503]: DEBUG nova.network.neutron [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Port 038e2362-a15e-4f40-bbd0-5289c014118b binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 1386.066332] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451059, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061682} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.066694] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1386.067436] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b54c6f-4f37-431b-a232-c48244714382 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.087709] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 261f8d40-439d-4469-b132-cf0448841057/261f8d40-439d-4469-b132-cf0448841057.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1386.087999] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0eba878-a28a-4497-a6e6-cf1874517bcb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.108725] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1386.108725] env[65503]: value = "task-4451060" [ 1386.108725] env[65503]: _type = "Task" [ 1386.108725] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.117775] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451060, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.151520] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5a72d512-443d-4db9-9e12-2452fd420eeb tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "5032f46e-8185-4181-a2d6-b359abddd1ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.710s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1386.620022] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451060, 'name': ReconfigVM_Task, 'duration_secs': 0.294012} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.620352] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 261f8d40-439d-4469-b132-cf0448841057/261f8d40-439d-4469-b132-cf0448841057.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1386.621012] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4264ed38-5855-4218-b2da-658cd9a28cff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.627761] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1386.627761] env[65503]: value = "task-4451061" [ 1386.627761] env[65503]: _type = "Task" [ 1386.627761] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.637235] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451061, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.691412] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1386.691652] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1386.691797] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.141253] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451061, 'name': Rename_Task, 'duration_secs': 0.14789} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.141684] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1387.142064] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d99034c-84ec-4210-a7f9-9e7c8c87fb4c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.149994] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1387.149994] env[65503]: value = "task-4451062" [ 1387.149994] env[65503]: _type = "Task" [ 1387.149994] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.159288] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451062, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.663239] env[65503]: DEBUG oslo_vmware.api [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451062, 'name': PowerOnVM_Task, 'duration_secs': 0.459508} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.663584] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1387.663878] env[65503]: INFO nova.compute.manager [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Took 4.78 seconds to spawn the instance on the hypervisor. [ 1387.664085] env[65503]: DEBUG nova.compute.manager [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1387.664997] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b689dd6-7b77-44c6-8765-1b02527763ca {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.702434] env[65503]: WARNING neutronclient.v2_0.client [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
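The entries above trace the spawn path for instance 261f8d40-439d-4469-b132-cf0448841057: each vCenter operation (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is submitted and then polled until the log reports "completed successfully". A minimal sketch of that poll-until-done pattern follows; the function and the shape of the returned dict are hypothetical stand-ins for illustration, not the actual oslo_vmware API.

# Illustrative sketch only: a generic poll loop in the spirit of the
# "Waiting for the task ... progress is N% ... completed successfully"
# cycle logged above. `poll_fn` and its return format are assumptions.
import time

class TaskTimeout(Exception):
    pass

def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    """Poll `poll_fn` until it reports success, logging progress.

    `poll_fn` is assumed to return a dict such as
    {'state': 'running'|'success'|'error', 'progress': int}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError('task failed: %s' % info)
        # Mirrors the periodic "progress is N%" entries in the log.
        print('progress is %d%%' % info.get('progress', 0))
        time.sleep(interval)
    raise TaskTimeout('task did not complete within %.0fs' % timeout)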
[ 1387.754365] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.754567] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1387.754743] env[65503]: DEBUG nova.network.neutron [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1388.185681] env[65503]: INFO nova.compute.manager [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Took 12.58 seconds to build instance. [ 1388.257981] env[65503]: WARNING neutronclient.v2_0.client [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1388.259075] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1388.259620] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1388.385105] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1388.385545] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no 
such option valid_interfaces in group [barbican] [ 1388.461104] env[65503]: WARNING neutronclient.v2_0.client [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1388.461787] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1388.462155] env[65503]: WARNING openstack [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1388.558940] env[65503]: DEBUG nova.network.neutron [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance_info_cache with network_info: [{"id": "038e2362-a15e-4f40-bbd0-5289c014118b", "address": "fa:16:3e:12:d8:4b", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038e2362-a1", "ovs_interfaceid": "038e2362-a15e-4f40-bbd0-5289c014118b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1388.580549] env[65503]: INFO nova.compute.manager [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Rebuilding instance [ 1388.626557] env[65503]: DEBUG nova.compute.manager [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Checking state {{(pid=65503) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1829}} [ 1388.627496] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79eed2e-8190-4dd7-8d56-858149a832a3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.687937] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7a370303-520e-4dcc-b8a5-4ac795fe8481 tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "261f8d40-439d-4469-b132-cf0448841057" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.091s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1389.061752] env[65503]: DEBUG oslo_concurrency.lockutils [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1389.587213] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f8c6e0-953d-43ca-be79-9dc07dfd5317 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.607479] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500805b2-2a8d-40c4-bbe8-9df235bfeb9e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.615299] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance '0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7' progress to 83 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1389.641850] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1389.642124] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4061828-9873-4347-a999-bf52fe6657a2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.650849] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1389.650849] env[65503]: value = "task-4451063" [ 1389.650849] env[65503]: _type = "Task" [ 1389.650849] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.662788] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451063, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.121454] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1390.121719] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6616597a-0bf1-444e-9ea6-b23238044a49 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.129740] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1390.129740] env[65503]: value = "task-4451064" [ 1390.129740] env[65503]: _type = "Task" [ 1390.129740] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.138415] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451064, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.163327] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451063, 'name': PowerOffVM_Task, 'duration_secs': 0.186401} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.163653] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1390.163977] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1390.164862] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b85304-3994-4576-96c5-7b49354a0d1e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.173598] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1390.173872] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d12c3f52-4941-47b9-bed4-935ab56eef14 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.204495] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1390.204711] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1390.204870] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Deleting the datastore file [datastore1] 261f8d40-439d-4469-b132-cf0448841057 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1390.205184] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30c46892-c2ab-4f41-9315-c6dd1377b4a9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.213547] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1390.213547] env[65503]: value = "task-4451066" [ 1390.213547] env[65503]: _type = "Task" [ 1390.213547] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.223291] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451066, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.641572] env[65503]: DEBUG oslo_vmware.api [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451064, 'name': PowerOnVM_Task, 'duration_secs': 0.40487} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.641908] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1390.642052] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bd7600-30d5-4ec6-b889-b88f55313fce tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance '0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7' progress to 100 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1390.724718] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451066, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10738} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.724981] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1390.725179] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1390.725348] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1391.762242] env[65503]: DEBUG nova.virt.hardware [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1391.762570] env[65503]: DEBUG nova.virt.hardware [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1391.762655] env[65503]: DEBUG nova.virt.hardware [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1391.762834] env[65503]: DEBUG nova.virt.hardware [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1391.762976] env[65503]: DEBUG nova.virt.hardware [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1391.763135] env[65503]: DEBUG nova.virt.hardware [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 
tempest-ServerShowV247Test-346927715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1391.763347] env[65503]: DEBUG nova.virt.hardware [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1391.763512] env[65503]: DEBUG nova.virt.hardware [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1391.763678] env[65503]: DEBUG nova.virt.hardware [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1391.763845] env[65503]: DEBUG nova.virt.hardware [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1391.764038] env[65503]: DEBUG nova.virt.hardware [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1391.765291] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7c3f9e-3993-40e5-a482-e4d25275c319 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.776885] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801dadba-9efb-4d59-ba03-2fe221e79697 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.800171] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Instance VIF info [] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1391.805723] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1391.806028] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 261f8d40-439d-4469-b132-cf0448841057] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1391.806267] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27c7f55a-a100-4041-903b-d15d1e04796c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.825469] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1391.825469] env[65503]: value = "task-4451067" [ 1391.825469] env[65503]: _type = "Task" [ 1391.825469] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.835065] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451067, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.336073] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451067, 'name': CreateVM_Task, 'duration_secs': 0.26662} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.336315] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 261f8d40-439d-4469-b132-cf0448841057] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1392.336757] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.336946] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1392.337332] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1392.337591] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96af0693-89a2-47e2-9ac3-ad8fd81ee54d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.343306] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1392.343306] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]524cabe0-26a7-9ec9-5e92-b36c0dff5ba8" [ 1392.343306] env[65503]: _type = "Task" [ 1392.343306] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.352353] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524cabe0-26a7-9ec9-5e92-b36c0dff5ba8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.855233] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]524cabe0-26a7-9ec9-5e92-b36c0dff5ba8, 'name': SearchDatastore_Task, 'duration_secs': 0.010142} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.855670] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1392.855827] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1392.856092] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.856240] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1392.856479] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1392.856845] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd5604e8-8ec3-49b1-86ec-c777454db784 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.867443] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1392.867682] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1392.868582] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d7c59dc-27a2-45e4-bb65-79147d3fb9ee {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.875048] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1392.875048] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527b87ed-b1ad-56ab-37c1-f721623ab201" [ 1392.875048] env[65503]: _type = "Task" [ 1392.875048] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.885295] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527b87ed-b1ad-56ab-37c1-f721623ab201, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.386314] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527b87ed-b1ad-56ab-37c1-f721623ab201, 'name': SearchDatastore_Task, 'duration_secs': 0.010818} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.387232] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-237d6a94-fbd5-48e7-802c-23670f2a0aa8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.393311] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1393.393311] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]520fa85e-5368-79f2-65fa-8032220afda5" [ 1393.393311] env[65503]: _type = "Task" [ 1393.393311] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.402177] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520fa85e-5368-79f2-65fa-8032220afda5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.441991] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1393.442279] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1393.442470] env[65503]: DEBUG nova.compute.manager [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Going to confirm migration 9 {{(pid=65503) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 1393.904862] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]520fa85e-5368-79f2-65fa-8032220afda5, 'name': SearchDatastore_Task, 'duration_secs': 0.010191} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.905289] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1393.905423] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 261f8d40-439d-4469-b132-cf0448841057/261f8d40-439d-4469-b132-cf0448841057.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1393.905713] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aaec39e2-079f-4b69-9dea-f0c45af01ba0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.913094] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1393.913094] env[65503]: value = "task-4451068" [ 1393.913094] env[65503]: _type = "Task" [ 1393.913094] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.921482] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451068, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.948837] env[65503]: WARNING neutronclient.v2_0.client [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1394.026731] env[65503]: WARNING neutronclient.v2_0.client [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1394.027128] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.027290] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1394.027485] env[65503]: DEBUG nova.network.neutron [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1394.027689] env[65503]: DEBUG nova.objects.instance [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'info_cache' on Instance uuid 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1394.424363] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451068, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470413} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.424635] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 261f8d40-439d-4469-b132-cf0448841057/261f8d40-439d-4469-b132-cf0448841057.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1394.424846] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1394.425119] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce41d8f2-f44b-4b2b-8612-c61fe30eeeed {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.432274] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1394.432274] env[65503]: value = "task-4451069" [ 1394.432274] env[65503]: _type = "Task" [ 1394.432274] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.441636] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451069, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.942745] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451069, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06706} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.943248] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1394.943956] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf92a72e-8864-4b79-bd3a-fa7aa5e6530d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.964897] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 261f8d40-439d-4469-b132-cf0448841057/261f8d40-439d-4469-b132-cf0448841057.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1394.965226] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22b557c3-2c0b-4f4c-86c5-7906f6ab3f4d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.986528] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1394.986528] env[65503]: value = "task-4451070" [ 1394.986528] env[65503]: _type = "Task" [ 1394.986528] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.995397] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451070, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.033833] env[65503]: WARNING neutronclient.v2_0.client [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
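The CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above correspond to cloning the cached image VMDK into the instance directory and then growing the root disk to the flavor size. A rough sketch of those two calls through the same session object; the datacenter reference, datastore paths, and size argument are placeholders rather than values from this log:

    def copy_and_extend_root_disk(session, dc_ref, src_vmdk, dst_vmdk, new_kb):
        disk_mgr = session.vim.service_content.virtualDiskManager
        # Copy the cached image VMDK to the per-instance path
        # ("Copying Virtual Disk ..." above).
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=src_vmdk, sourceDatacenter=dc_ref,
                                  destName=dst_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(task)
        # Extend the copied disk to the requested capacity in KB
        # ("Extending root virtual disk ..." above).
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                  name=dst_vmdk, datacenter=dc_ref,
                                  newCapacityKb=new_kb, eagerZero=False)
        session.wait_for_task(task)
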
[ 1395.034587] env[65503]: WARNING openstack [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1395.034945] env[65503]: WARNING openstack [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1395.157968] env[65503]: WARNING openstack [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1395.158559] env[65503]: WARNING openstack [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1395.232150] env[65503]: WARNING neutronclient.v2_0.client [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
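The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around the refresh_cache-<uuid> name come from oslo.concurrency's named locks, which serialize concurrent rebuilds of a single instance's network info cache. A minimal illustration of that locking pattern; the function and its body are made up for the example, and only the lock-name format mirrors the log:

    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, rebuild_cache):
        # lockutils.lock() yields an in-process lock keyed by name; the
        # acquire/release DEBUG lines above are emitted inside it.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return rebuild_cache(instance_uuid)
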
[ 1395.232692] env[65503]: WARNING openstack [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1395.233062] env[65503]: WARNING openstack [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1395.318352] env[65503]: DEBUG nova.network.neutron [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance_info_cache with network_info: [{"id": "038e2362-a15e-4f40-bbd0-5289c014118b", "address": "fa:16:3e:12:d8:4b", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038e2362-a1", "ovs_interfaceid": "038e2362-a15e-4f40-bbd0-5289c014118b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1395.498240] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451070, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.821978] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1395.822271] env[65503]: DEBUG nova.objects.instance [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'migration_context' on Instance uuid 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1395.998144] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451070, 'name': ReconfigVM_Task, 'duration_secs': 0.561265} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.998631] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 261f8d40-439d-4469-b132-cf0448841057/261f8d40-439d-4469-b132-cf0448841057.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1395.999132] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f79fcd79-e54b-4faf-9428-6e3cf97c8f3b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.007475] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1396.007475] env[65503]: value = "task-4451071" [ 1396.007475] env[65503]: _type = "Task" [ 1396.007475] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.016283] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451071, 'name': Rename_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.325404] env[65503]: DEBUG nova.objects.base [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Object Instance<0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7> lazy-loaded attributes: info_cache,migration_context {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1396.326407] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7bb9db-9492-4b34-b3b5-4c2b54dc6b8f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.348414] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46333bae-b93c-43ea-9573-a3ee0c5a94ae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.355061] env[65503]: DEBUG oslo_vmware.api [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1396.355061] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]522fa799-32bb-8cdd-7d22-6321b942ff8a" [ 1396.355061] env[65503]: _type = "Task" [ 1396.355061] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.364164] env[65503]: DEBUG oslo_vmware.api [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522fa799-32bb-8cdd-7d22-6321b942ff8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.518063] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451071, 'name': Rename_Task, 'duration_secs': 0.131884} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.518453] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1396.518760] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19ec7804-5d6d-499a-9e1e-f36323c33b73 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.525534] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1396.525534] env[65503]: value = "task-4451072" [ 1396.525534] env[65503]: _type = "Task" [ 1396.525534] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.533736] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451072, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.866284] env[65503]: DEBUG oslo_vmware.api [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522fa799-32bb-8cdd-7d22-6321b942ff8a, 'name': SearchDatastore_Task, 'duration_secs': 0.01081} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.866610] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.866835] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1397.036711] env[65503]: DEBUG oslo_vmware.api [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451072, 'name': PowerOnVM_Task, 'duration_secs': 0.439401} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.037096] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1397.037350] env[65503]: DEBUG nova.compute.manager [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1397.038276] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b67f2f-f06e-4df4-9a12-5138a1d65659 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.439364] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52477f80-d12b-41b6-9f52-892806a89629 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.448065] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a318ebf8-e3fb-4cb6-b833-e4688a2cfaa5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.479212] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8231e553-75a9-4fa7-9df8-94096e7bfdfb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.487823] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d71b99d-282d-46e5-8de2-7f348ccb15b1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.503153] env[65503]: DEBUG nova.compute.provider_tree [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.554131] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1398.006391] env[65503]: DEBUG nova.scheduler.client.report [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1398.480334] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "261f8d40-439d-4469-b132-cf0448841057" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1398.480767] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "261f8d40-439d-4469-b132-cf0448841057" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1398.480870] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "261f8d40-439d-4469-b132-cf0448841057-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1398.481038] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "261f8d40-439d-4469-b132-cf0448841057-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1398.481229] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "261f8d40-439d-4469-b132-cf0448841057-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1398.483625] env[65503]: INFO nova.compute.manager [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Terminating instance [ 1398.988144] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "refresh_cache-261f8d40-439d-4469-b132-cf0448841057" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.988456] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquired lock "refresh_cache-261f8d40-439d-4469-b132-cf0448841057" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1398.988671] env[65503]: DEBUG nova.network.neutron [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 
tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1399.016933] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.150s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1399.019784] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.466s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1399.019964] env[65503]: DEBUG nova.objects.instance [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1399.492243] env[65503]: WARNING neutronclient.v2_0.client [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1399.492999] env[65503]: WARNING openstack [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1399.493373] env[65503]: WARNING openstack [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1399.516344] env[65503]: DEBUG nova.network.neutron [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1399.579772] env[65503]: DEBUG nova.network.neutron [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1399.587412] env[65503]: INFO nova.scheduler.client.report [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleted allocation for migration c9db3d12-fecc-4120-9c38-1374c01543c6 [ 1400.035142] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3c06adcd-7202-47a7-ae3e-d813c23e322d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1400.082386] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Releasing lock "refresh_cache-261f8d40-439d-4469-b132-cf0448841057" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1400.082813] env[65503]: DEBUG nova.compute.manager [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1400.083009] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1400.083914] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9648a5e-31b7-4dc5-9124-1fce5a1a19eb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.094166] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1b1d4c5a-1a16-4db4-8c58-b2d15e18cf71 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.651s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1400.094538] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1400.094827] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29231983-2596-48cc-aa57-4173feed67f5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.104538] env[65503]: DEBUG oslo_vmware.api [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1400.104538] env[65503]: value = "task-4451073" [ 1400.104538] env[65503]: _type = "Task" [ 1400.104538] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.117102] env[65503]: DEBUG oslo_vmware.api [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451073, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.549075] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1400.549510] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1400.549616] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1400.549831] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1400.549988] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1400.552222] env[65503]: INFO nova.compute.manager [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Terminating instance [ 1400.614336] env[65503]: DEBUG oslo_vmware.api [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451073, 'name': PowerOffVM_Task, 'duration_secs': 0.192153} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.614585] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1400.614747] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1400.615057] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4f1e9f0-0906-497e-b075-88287c97d556 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.642135] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1400.642381] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1400.642554] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Deleting the datastore file [datastore1] 261f8d40-439d-4469-b132-cf0448841057 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1400.642839] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44079519-8a5b-427e-bfc2-787565ec65fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.650454] env[65503]: DEBUG oslo_vmware.api [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1400.650454] env[65503]: value = "task-4451075" [ 1400.650454] env[65503]: _type = "Task" [ 1400.650454] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.659064] env[65503]: DEBUG oslo_vmware.api [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451075, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.056495] env[65503]: DEBUG nova.compute.manager [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1401.056661] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1401.057570] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aaaf0c8-31f0-4a73-96c0-9dbdae0a43ab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.065902] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1401.066189] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8d233ec-de84-4e13-bca2-af9ae6ff624d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.073545] env[65503]: DEBUG oslo_vmware.api [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1401.073545] env[65503]: value = "task-4451076" [ 1401.073545] env[65503]: _type = "Task" [ 1401.073545] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.083569] env[65503]: DEBUG oslo_vmware.api [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451076, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.161246] env[65503]: DEBUG oslo_vmware.api [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.332101} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.161525] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1401.161712] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1401.161907] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1401.162130] env[65503]: INFO nova.compute.manager [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 261f8d40-439d-4469-b132-cf0448841057] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1401.162417] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1401.162631] env[65503]: DEBUG nova.compute.manager [-] [instance: 261f8d40-439d-4469-b132-cf0448841057] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1401.162729] env[65503]: DEBUG nova.network.neutron [-] [instance: 261f8d40-439d-4469-b132-cf0448841057] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1401.163016] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1401.163571] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1401.163829] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1401.186208] env[65503]: DEBUG nova.network.neutron [-] [instance: 261f8d40-439d-4469-b132-cf0448841057] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1401.186480] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1401.584209] env[65503]: DEBUG oslo_vmware.api [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451076, 'name': PowerOffVM_Task, 'duration_secs': 0.234327} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.584621] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1401.584621] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1401.584976] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70084fac-2e13-45ef-80ee-fa54ecbb0e42 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.646428] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1401.646652] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1401.646828] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleting the datastore file [datastore2] 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1401.647122] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6296820d-7f98-4cad-8b62-022c6883a83a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.655175] env[65503]: DEBUG oslo_vmware.api [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1401.655175] env[65503]: value = "task-4451078" [ 1401.655175] env[65503]: _type = "Task" [ 1401.655175] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.663484] env[65503]: DEBUG oslo_vmware.api [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451078, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.689179] env[65503]: DEBUG nova.network.neutron [-] [instance: 261f8d40-439d-4469-b132-cf0448841057] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1402.165438] env[65503]: DEBUG oslo_vmware.api [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264782} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.165715] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1402.165896] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1402.166081] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1402.166258] env[65503]: INFO nova.compute.manager [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1402.166538] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1402.166738] env[65503]: DEBUG nova.compute.manager [-] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1402.166833] env[65503]: DEBUG nova.network.neutron [-] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1402.167086] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1402.167604] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1402.167860] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1402.191956] env[65503]: INFO nova.compute.manager [-] [instance: 261f8d40-439d-4469-b132-cf0448841057] Took 1.03 seconds to deallocate network for instance. [ 1402.243073] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
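Annotation: the preceding entries trace the hypervisor-side teardown for both instances; each step (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is a vSphere call issued through oslo.vmware and, where vCenter returns a task object, polled by wait_for_task until the "completed successfully" line appears. A minimal sketch of that invoke-and-poll pattern follows; the host, credentials and poll interval are placeholders rather than values from this log, and only public oslo.vmware/VIM calls seen in this section are used.

    # Sketch of the oslo.vmware "invoke task, then poll" pattern behind the
    # PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task entries above.
    # Host, credentials and the UUID lookup are illustrative placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Locate the VM by instance UUID (SearchIndex.FindAllByUuid, as in the log).
    search_index = session.vim.service_content.searchIndex
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid', search_index,
        uuid='261f8d40-439d-4469-b132-cf0448841057',
        vmSearch=True, instanceUuid=True)
    vm_ref = vm_refs[0]

    # PowerOffVM_Task returns a task moref; wait_for_task polls it, producing
    # the "progress is 0%" and "duration_secs" entries seen in the log.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM completes synchronously, so no task is polled for it.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)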
[ 1402.699197] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1402.699961] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1402.699961] env[65503]: DEBUG nova.objects.instance [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lazy-loading 'resources' on Instance uuid 261f8d40-439d-4469-b132-cf0448841057 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1402.705942] env[65503]: DEBUG nova.compute.manager [req-3fbfc44f-dfbe-4e81-a9af-7b65ed242884 req-cd13bb9c-1b42-4614-ac71-99dcadc70c6d service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Received event network-vif-deleted-038e2362-a15e-4f40-bbd0-5289c014118b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1402.706250] env[65503]: INFO nova.compute.manager [req-3fbfc44f-dfbe-4e81-a9af-7b65ed242884 req-cd13bb9c-1b42-4614-ac71-99dcadc70c6d service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Neutron deleted interface 038e2362-a15e-4f40-bbd0-5289c014118b; detaching it from the instance and deleting it from the info cache [ 1402.706564] env[65503]: DEBUG nova.network.neutron [req-3fbfc44f-dfbe-4e81-a9af-7b65ed242884 req-cd13bb9c-1b42-4614-ac71-99dcadc70c6d service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1403.178936] env[65503]: DEBUG nova.network.neutron [-] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1403.212208] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-125c4577-6738-4635-ae9e-875c7dd2826f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.222865] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d725ee53-1c98-4d6e-8eed-8f25949f0b1e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.252747] env[65503]: DEBUG nova.compute.manager [req-3fbfc44f-dfbe-4e81-a9af-7b65ed242884 req-cd13bb9c-1b42-4614-ac71-99dcadc70c6d service nova] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Detach interface failed, port_id=038e2362-a15e-4f40-bbd0-5289c014118b, reason: Instance 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 could not be found. 
{{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1403.282076] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0f512c-a6ef-43d6-86a3-3b4587567b79 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.291507] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c36be77-3415-4a04-bc75-43dab24574e4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.324727] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ab54fc-2664-47b4-94c8-180a061bfb3e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.333426] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4e395b-185a-4850-94a9-46a79c0db3e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.351401] env[65503]: DEBUG nova.compute.provider_tree [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1403.682120] env[65503]: INFO nova.compute.manager [-] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Took 1.52 seconds to deallocate network for instance. [ 1403.854388] env[65503]: DEBUG nova.scheduler.client.report [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1404.188972] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.359352] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.660s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.362063] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.173s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.362284] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.383536] env[65503]: INFO nova.scheduler.client.report [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleted allocations for instance 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7 [ 1404.385396] env[65503]: INFO nova.scheduler.client.report [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Deleted allocations for instance 261f8d40-439d-4469-b132-cf0448841057 [ 1404.895735] env[65503]: DEBUG oslo_concurrency.lockutils [None req-f55983d2-c57d-4f95-9aba-91f73b31c03e tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.346s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.897761] env[65503]: DEBUG oslo_concurrency.lockutils [None req-cd834487-244a-4780-a9a2-9561335e931d tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "261f8d40-439d-4469-b132-cf0448841057" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.416s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1406.048045] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "5032f46e-8185-4181-a2d6-b359abddd1ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1406.048407] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "5032f46e-8185-4181-a2d6-b359abddd1ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1406.049018] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "5032f46e-8185-4181-a2d6-b359abddd1ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1406.049237] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 
tempest-ServerShowV247Test-346927715-project-member] Lock "5032f46e-8185-4181-a2d6-b359abddd1ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1406.049408] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "5032f46e-8185-4181-a2d6-b359abddd1ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1406.051507] env[65503]: INFO nova.compute.manager [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Terminating instance [ 1406.544121] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1406.544358] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1406.554379] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "refresh_cache-5032f46e-8185-4181-a2d6-b359abddd1ad" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.554543] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquired lock "refresh_cache-5032f46e-8185-4181-a2d6-b359abddd1ad" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1406.554712] env[65503]: DEBUG nova.network.neutron [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1407.047225] env[65503]: DEBUG nova.compute.manager [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1407.056993] env[65503]: WARNING neutronclient.v2_0.client [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1407.057652] env[65503]: WARNING openstack [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1407.057993] env[65503]: WARNING openstack [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1407.079894] env[65503]: DEBUG nova.network.neutron [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1407.143179] env[65503]: DEBUG nova.network.neutron [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1407.570254] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1407.570512] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1407.572060] env[65503]: INFO nova.compute.claims [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1407.645279] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Releasing lock "refresh_cache-5032f46e-8185-4181-a2d6-b359abddd1ad" {{(pid=65503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1407.645785] env[65503]: DEBUG nova.compute.manager [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1407.645984] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1407.646928] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b94ca7-3755-4dea-9883-5093f9531647 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.655637] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1407.655800] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6669cbd-08f7-4773-afa0-969990c99a2f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.663044] env[65503]: DEBUG oslo_vmware.api [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1407.663044] env[65503]: value = "task-4451079" [ 1407.663044] env[65503]: _type = "Task" [ 1407.663044] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.672297] env[65503]: DEBUG oslo_vmware.api [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451079, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.173826] env[65503]: DEBUG oslo_vmware.api [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451079, 'name': PowerOffVM_Task, 'duration_secs': 0.123669} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.174205] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1408.174275] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1408.174487] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd9b7d7d-ca36-469d-b1df-1d08232b82cc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.204030] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1408.204262] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1408.204444] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Deleting the datastore file [datastore2] 5032f46e-8185-4181-a2d6-b359abddd1ad {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1408.204715] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a2bf09d-b3a4-42fe-b658-ebae9986c503 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.212195] env[65503]: DEBUG oslo_vmware.api [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for the task: (returnval){ [ 1408.212195] env[65503]: value = "task-4451081" [ 1408.212195] env[65503]: _type = "Task" [ 1408.212195] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.221696] env[65503]: DEBUG oslo_vmware.api [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451081, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.617756] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d467a7-a1b3-4091-bd10-ec7380b44b3f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.625835] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf884255-346e-4b92-8c8b-4abac28702f3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.656901] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8fbf655-6d56-45f7-b910-3a842d81c2d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.664920] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206aaa1d-2243-45ae-a8af-9fbcc863e490 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.679576] env[65503]: DEBUG nova.compute.provider_tree [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1408.721872] env[65503]: DEBUG oslo_vmware.api [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Task: {'id': task-4451081, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109889} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.722125] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1408.722305] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1408.722473] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1408.722635] env[65503]: INFO nova.compute.manager [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Took 1.08 seconds to destroy the instance on the hypervisor. 
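Annotation: every "Acquiring lock ... by ..." / "acquired ... waited" / "released ... held" line in this section is oslo.concurrency's lockutils timing and logging Nova's internal semaphores (the resource-tracker-wide "compute_resources" lock and the per-instance terminate locks). A small sketch of that pattern, assuming only the public lockutils API; the lock names mirror the log, while the guarded bodies are placeholders.

    # Sketch of the oslo.concurrency locking pattern that emits the
    # "Acquiring lock ... / acquired ... waited / released ... held" lines.
    from oslo_concurrency import lockutils

    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def update_usage(instance):
        """Serialized on the same semaphore the resource tracker uses."""
        # ... adjust tracked resource usage for the instance ...

    def do_terminate_instance(instance_uuid):
        # Per-instance lock, analogous to the terminate_instance /
        # do_terminate_instance locks in the log.
        with lockutils.lock(instance_uuid):
            pass  # power off, unregister, delete files, deallocate network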
[ 1408.723013] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1408.723245] env[65503]: DEBUG nova.compute.manager [-] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1408.723339] env[65503]: DEBUG nova.network.neutron [-] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1408.723573] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1408.724087] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1408.724334] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1408.745230] env[65503]: DEBUG nova.network.neutron [-] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1408.745469] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
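The "Waiting for function ..._deallocate_network_with_retries to return." line above is emitted by oslo.service's loopingcall helper while the network teardown is retried. A rough sketch of that retry pattern with RetryDecorator; the exception type, retry counts and helper function are illustrative stand-ins, not Nova's actual wiring:

from oslo_service import loopingcall


def release_ports_in_neutron():
    pass  # placeholder for the actual Neutron call


@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                            max_sleep_time=12,
                            exceptions=(ConnectionError,))
def deallocate_network_with_retries():
    # if release_ports_in_neutron() raises ConnectionError, the decorator sleeps
    # (2s, then 4s, ... capped at 12s) and retries up to three times before
    # letting the exception propagate
    release_ports_in_neutron()


deallocate_network_with_retries()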
[ 1409.182832] env[65503]: DEBUG nova.scheduler.client.report [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1409.247853] env[65503]: DEBUG nova.network.neutron [-] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1409.689406] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.119s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1409.690095] env[65503]: DEBUG nova.compute.manager [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1409.750900] env[65503]: INFO nova.compute.manager [-] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Took 1.03 seconds to deallocate network for instance. [ 1410.196062] env[65503]: DEBUG nova.compute.utils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1410.197572] env[65503]: DEBUG nova.compute.manager [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1410.197767] env[65503]: DEBUG nova.network.neutron [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1410.198091] env[65503]: WARNING neutronclient.v2_0.client [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
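The inventory dict reported above is what placement turns into schedulable capacity, computed as (total - reserved) * allocation_ratio per resource class. Reproducing that arithmetic for the values this provider reports:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0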
[ 1410.198399] env[65503]: WARNING neutronclient.v2_0.client [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1410.198994] env[65503]: WARNING openstack [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1410.199359] env[65503]: WARNING openstack [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1410.246194] env[65503]: DEBUG nova.policy [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af1890ab617d443e985db57a798cac5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93906c603f7a4b18a34fc4b42fb6d6c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1410.257582] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1410.257894] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1410.258145] env[65503]: DEBUG nova.objects.instance [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lazy-loading 'resources' on Instance uuid 5032f46e-8185-4181-a2d6-b359abddd1ad {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1410.589392] env[65503]: DEBUG nova.network.neutron [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Successfully created port: 38fa3af1-f940-49c8-b96a-07349687620f {{(pid=65503) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:579}} [ 1410.706761] env[65503]: DEBUG nova.compute.manager [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1410.805309] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfefa906-51b4-4f42-bb11-a31570c6cc1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.814138] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aec0652-8eb6-4e5c-83c7-f0eeef1bbc64 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.846879] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30a6233-d460-4157-8b15-87ddd1278505 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.855364] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c987ed86-d899-4fa7-80cb-aaecbec38462 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.869961] env[65503]: DEBUG nova.compute.provider_tree [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.373422] env[65503]: DEBUG nova.scheduler.client.report [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1411.716937] env[65503]: DEBUG nova.compute.manager [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1411.743130] env[65503]: DEBUG nova.virt.hardware [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1411.743374] env[65503]: DEBUG nova.virt.hardware [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1411.743521] env[65503]: DEBUG nova.virt.hardware [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1411.743696] env[65503]: DEBUG nova.virt.hardware [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1411.743833] env[65503]: DEBUG nova.virt.hardware [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1411.743973] env[65503]: DEBUG nova.virt.hardware [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1411.744191] env[65503]: DEBUG nova.virt.hardware [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1411.744344] env[65503]: DEBUG nova.virt.hardware [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1411.744504] env[65503]: DEBUG nova.virt.hardware [None 
req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1411.744657] env[65503]: DEBUG nova.virt.hardware [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1411.744823] env[65503]: DEBUG nova.virt.hardware [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1411.745689] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970fe7bb-d63d-4c70-b7e9-5fd397957bf0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.754502] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49bebb33-5e0d-4b43-a302-434bb8b34710 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.878613] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1411.904040] env[65503]: INFO nova.scheduler.client.report [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Deleted allocations for instance 5032f46e-8185-4181-a2d6-b359abddd1ad [ 1412.047392] env[65503]: DEBUG nova.compute.manager [req-8772edf3-7eb7-421d-8ab8-ff72d413050e req-65ef27bb-a993-4d97-9914-e9f3b402a823 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Received event network-vif-plugged-38fa3af1-f940-49c8-b96a-07349687620f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1412.047392] env[65503]: DEBUG oslo_concurrency.lockutils [req-8772edf3-7eb7-421d-8ab8-ff72d413050e req-65ef27bb-a993-4d97-9914-e9f3b402a823 service nova] Acquiring lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1412.047572] env[65503]: DEBUG oslo_concurrency.lockutils [req-8772edf3-7eb7-421d-8ab8-ff72d413050e req-65ef27bb-a993-4d97-9914-e9f3b402a823 service nova] Lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1412.047886] env[65503]: DEBUG oslo_concurrency.lockutils [req-8772edf3-7eb7-421d-8ab8-ff72d413050e req-65ef27bb-a993-4d97-9914-e9f3b402a823 service nova] Lock 
"3b7a6813-c1fa-4f51-ae17-cc147a8c809d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.047986] env[65503]: DEBUG nova.compute.manager [req-8772edf3-7eb7-421d-8ab8-ff72d413050e req-65ef27bb-a993-4d97-9914-e9f3b402a823 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] No waiting events found dispatching network-vif-plugged-38fa3af1-f940-49c8-b96a-07349687620f {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1412.048184] env[65503]: WARNING nova.compute.manager [req-8772edf3-7eb7-421d-8ab8-ff72d413050e req-65ef27bb-a993-4d97-9914-e9f3b402a823 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Received unexpected event network-vif-plugged-38fa3af1-f940-49c8-b96a-07349687620f for instance with vm_state building and task_state spawning. [ 1412.137939] env[65503]: DEBUG nova.network.neutron [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Successfully updated port: 38fa3af1-f940-49c8-b96a-07349687620f {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1412.413216] env[65503]: DEBUG oslo_concurrency.lockutils [None req-7b0ba0b8-9f5d-4166-bac9-fb001289932f tempest-ServerShowV247Test-346927715 tempest-ServerShowV247Test-346927715-project-member] Lock "5032f46e-8185-4181-a2d6-b359abddd1ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.365s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.640514] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.640699] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1412.640899] env[65503]: DEBUG nova.network.neutron [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1413.144178] env[65503]: WARNING openstack [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1413.144590] env[65503]: WARNING openstack [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 
tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1413.192674] env[65503]: DEBUG nova.network.neutron [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1413.212499] env[65503]: WARNING openstack [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1413.212904] env[65503]: WARNING openstack [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1413.274095] env[65503]: WARNING neutronclient.v2_0.client [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
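The "compute_resources" acquire/release lines above, with their waited/held timings, come from oslo.concurrency's lock decorator. A minimal sketch of that locking pattern; the decorated function is an illustrative stand-in, not the resource tracker itself:

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage_for_instance(instance_uuid):
    # only one thread at a time runs under the 'compute_resources' lock; the
    # decorator logs the "acquired ... waited" / "released ... held" lines at debug level
    print('updating usage for', instance_uuid)


update_usage_for_instance('5032f46e-8185-4181-a2d6-b359abddd1ad')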
[ 1413.274978] env[65503]: WARNING openstack [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1413.275339] env[65503]: WARNING openstack [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1413.355474] env[65503]: DEBUG nova.network.neutron [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance_info_cache with network_info: [{"id": "38fa3af1-f940-49c8-b96a-07349687620f", "address": "fa:16:3e:55:bb:66", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38fa3af1-f9", "ovs_interfaceid": "38fa3af1-f940-49c8-b96a-07349687620f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1413.858197] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1413.858640] env[65503]: DEBUG nova.compute.manager [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Instance network_info: |[{"id": "38fa3af1-f940-49c8-b96a-07349687620f", "address": "fa:16:3e:55:bb:66", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38fa3af1-f9", "ovs_interfaceid": "38fa3af1-f940-49c8-b96a-07349687620f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1413.859134] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:bb:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38fa3af1-f940-49c8-b96a-07349687620f', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1413.867202] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1413.867452] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1413.867708] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c66ac34c-cb3c-44b6-9545-b8b9abae9940 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.889477] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1413.889477] env[65503]: value = "task-4451082" [ 1413.889477] env[65503]: _type = "Task" [ 1413.889477] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.899251] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451082, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.110503] env[65503]: DEBUG nova.compute.manager [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Received event network-changed-38fa3af1-f940-49c8-b96a-07349687620f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1414.110696] env[65503]: DEBUG nova.compute.manager [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Refreshing instance network info cache due to event network-changed-38fa3af1-f940-49c8-b96a-07349687620f. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1414.110907] env[65503]: DEBUG oslo_concurrency.lockutils [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] Acquiring lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.111058] env[65503]: DEBUG oslo_concurrency.lockutils [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] Acquired lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1414.111220] env[65503]: DEBUG nova.network.neutron [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Refreshing network info cache for port 38fa3af1-f940-49c8-b96a-07349687620f {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1414.401223] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451082, 'name': CreateVM_Task} progress is 99%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.618352] env[65503]: WARNING neutronclient.v2_0.client [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
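The instance_info_cache entry above pins the port to subnet 192.168.128.0/28, with the gateway at .1, the DHCP server at .2 and the instance's fixed IP at .4. A quick standard-library check that those addresses are consistent with the CIDR from the cache:

import ipaddress

subnet = ipaddress.ip_network('192.168.128.0/28')
print(subnet.num_addresses)  # 16 addresses in a /28 (network and broadcast included)

for label, addr in [('gateway', '192.168.128.1'),
                    ('dhcp_server', '192.168.128.2'),
                    ('fixed_ip', '192.168.128.4')]:
    print(label, addr, ipaddress.ip_address(addr) in subnet)  # all True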
[ 1414.619163] env[65503]: WARNING openstack [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1414.619551] env[65503]: WARNING openstack [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1414.727673] env[65503]: WARNING openstack [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1414.728173] env[65503]: WARNING openstack [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1414.805563] env[65503]: WARNING neutronclient.v2_0.client [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1414.806267] env[65503]: WARNING openstack [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1414.806645] env[65503]: WARNING openstack [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1414.898266] env[65503]: DEBUG nova.network.neutron [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updated VIF entry in instance network info cache for port 38fa3af1-f940-49c8-b96a-07349687620f. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1414.898266] env[65503]: DEBUG nova.network.neutron [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance_info_cache with network_info: [{"id": "38fa3af1-f940-49c8-b96a-07349687620f", "address": "fa:16:3e:55:bb:66", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38fa3af1-f9", "ovs_interfaceid": "38fa3af1-f940-49c8-b96a-07349687620f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1414.904492] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451082, 'name': CreateVM_Task, 'duration_secs': 0.578051} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.904936] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1414.905484] env[65503]: WARNING neutronclient.v2_0.client [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
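The same cache record also carries the VIF details that later feed the VM configuration (port id, MAC, devname, bridge, MTU). A small illustrative helper for flattening an entry shaped like the one logged above; the dict literal repeats only fields that appear in the log, trimmed for brevity:

vif = {
    'id': '38fa3af1-f940-49c8-b96a-07349687620f',
    'address': 'fa:16:3e:55:bb:66',
    'devname': 'tap38fa3af1-f9',
    'network': {
        'bridge': 'br-int',
        'meta': {'mtu': 8950},
        'subnets': [{'cidr': '192.168.128.0/28',
                     'ips': [{'address': '192.168.128.4'}]}],
    },
}


def summarize_vif(vif):
    # flatten the nested cache entry into the fields an operator usually wants
    return {
        'port_id': vif['id'],
        'mac': vif['address'],
        'device': vif['devname'],
        'bridge': vif['network']['bridge'],
        'mtu': vif['network']['meta']['mtu'],
        'fixed_ips': [ip['address']
                      for subnet in vif['network']['subnets']
                      for ip in subnet['ips']],
    }


print(summarize_vif(vif))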
[ 1414.905906] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.906116] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1414.906476] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1414.908044] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-112d4f66-c36f-43d2-9483-c57b339bf238 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.916014] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1414.916014] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]522039d1-ace3-9d46-2fd8-50490a474720" [ 1414.916014] env[65503]: _type = "Task" [ 1414.916014] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.923828] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522039d1-ace3-9d46-2fd8-50490a474720, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.406953] env[65503]: DEBUG oslo_concurrency.lockutils [req-e1323c90-e42c-419a-a7e6-4e768a694996 req-2a62687b-5b93-41fe-8f39-69922cca0cd5 service nova] Releasing lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1415.427780] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]522039d1-ace3-9d46-2fd8-50490a474720, 'name': SearchDatastore_Task, 'duration_secs': 0.020706} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.428110] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1415.428344] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1415.428618] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.428775] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1415.428952] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1415.429241] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87364544-94b9-4f2b-b3e0-2fe7471e46bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.439702] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1415.439847] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1415.440618] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e56e31f-0045-48ac-a20d-aacd5e55ca36 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.446431] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1415.446431] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b9319f-4af6-1356-311e-2a61ae52c6aa" [ 1415.446431] env[65503]: _type = "Task" [ 1415.446431] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.454951] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b9319f-4af6-1356-311e-2a61ae52c6aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.958851] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1415.959224] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1415.960262] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b9319f-4af6-1356-311e-2a61ae52c6aa, 'name': SearchDatastore_Task, 'duration_secs': 0.01038} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.961778] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0963887e-18b0-46d6-b20a-785e67e7e1e9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.968958] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1415.968958] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a495a2-5fb7-7332-24bc-c73e84053118" [ 1415.968958] env[65503]: _type = "Task" [ 1415.968958] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.978122] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a495a2-5fb7-7332-24bc-c73e84053118, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.461795] env[65503]: DEBUG nova.compute.manager [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1416.480429] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52a495a2-5fb7-7332-24bc-c73e84053118, 'name': SearchDatastore_Task, 'duration_secs': 0.010599} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.480727] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1416.480996] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 3b7a6813-c1fa-4f51-ae17-cc147a8c809d/3b7a6813-c1fa-4f51-ae17-cc147a8c809d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1416.482023] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d575a2c-f0d9-4483-b16c-1c4ba05bb356 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.490435] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1416.490435] env[65503]: value = "task-4451083" [ 1416.490435] env[65503]: _type = "Task" [ 1416.490435] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.500633] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451083, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.985856] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1416.986157] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1416.987621] env[65503]: INFO nova.compute.claims [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1417.001043] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451083, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458285} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.001302] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 3b7a6813-c1fa-4f51-ae17-cc147a8c809d/3b7a6813-c1fa-4f51-ae17-cc147a8c809d.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1417.001503] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1417.001847] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a60ba20-d198-40ba-ae58-eeed58631c30 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.009570] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1417.009570] env[65503]: value = "task-4451084" [ 1417.009570] env[65503]: _type = "Task" [ 1417.009570] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.021327] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451084, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.520324] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451084, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061307} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.520600] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1417.521403] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00e7a6c-8e0c-470d-a4c0-12568f6a3aa5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.543774] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 3b7a6813-c1fa-4f51-ae17-cc147a8c809d/3b7a6813-c1fa-4f51-ae17-cc147a8c809d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1417.544394] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af52d9af-a729-4433-8539-acc184723367 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.565299] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1417.565299] env[65503]: value = "task-4451085" [ 1417.565299] env[65503]: _type = "Task" [ 1417.565299] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.575085] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451085, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.036314] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3b61a5-2278-4fb5-a68e-939cd47304bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.044679] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f3e877-78ba-4ced-b333-11c01390116e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.078666] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bfe69e-8d79-4f01-bea8-1b569d8a4cce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.087045] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451085, 'name': ReconfigVM_Task, 'duration_secs': 0.28686} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.088717] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 3b7a6813-c1fa-4f51-ae17-cc147a8c809d/3b7a6813-c1fa-4f51-ae17-cc147a8c809d.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1418.089423] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f81efedc-c3e6-41fc-956f-3f1b6be522fa {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.091928] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d224947b-f5b3-4809-ad7b-bc606050e4b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.108629] env[65503]: DEBUG nova.compute.provider_tree [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1418.112099] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1418.112099] env[65503]: value = "task-4451086" [ 1418.112099] env[65503]: _type = "Task" [ 1418.112099] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.122281] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451086, 'name': Rename_Task} progress is 10%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.614368] env[65503]: DEBUG nova.scheduler.client.report [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1418.626699] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451086, 'name': Rename_Task, 'duration_secs': 0.132944} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.626962] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1418.627222] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60320eb1-ac27-4c31-9b34-93fda93cb6f9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.634601] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1418.634601] env[65503]: value = "task-4451087" [ 1418.634601] env[65503]: _type = "Task" [ 1418.634601] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.645808] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451087, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.122674] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.136s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1419.123249] env[65503]: DEBUG nova.compute.manager [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Start building networks asynchronously for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1419.145673] env[65503]: DEBUG oslo_vmware.api [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451087, 'name': PowerOnVM_Task, 'duration_secs': 0.482813} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.146769] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1419.146769] env[65503]: INFO nova.compute.manager [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Took 7.43 seconds to spawn the instance on the hypervisor. [ 1419.146769] env[65503]: DEBUG nova.compute.manager [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1419.148040] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd569433-721c-41a3-abfa-8e5ff8d01f5d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.629218] env[65503]: DEBUG nova.compute.utils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1419.630570] env[65503]: DEBUG nova.compute.manager [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1419.630753] env[65503]: DEBUG nova.network.neutron [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1419.631454] env[65503]: WARNING neutronclient.v2_0.client [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1419.631781] env[65503]: WARNING neutronclient.v2_0.client [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1419.632359] env[65503]: WARNING openstack [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1419.632690] env[65503]: WARNING openstack [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1419.664156] env[65503]: INFO nova.compute.manager [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Took 12.11 seconds to build instance. [ 1419.677364] env[65503]: DEBUG nova.policy [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3299e99bda746c89cd71759d037fd52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62e7660e16774c408729de84ba5c7534', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1419.959674] env[65503]: DEBUG nova.network.neutron [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Successfully created port: b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1420.140243] env[65503]: DEBUG nova.compute.manager [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1420.165890] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d074103a-bbd2-4948-a446-705b02a8cb10 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.621s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1420.910940] env[65503]: DEBUG nova.compute.manager [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Received event network-changed-38fa3af1-f940-49c8-b96a-07349687620f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1420.911287] env[65503]: DEBUG nova.compute.manager [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Refreshing instance network info cache due to event network-changed-38fa3af1-f940-49c8-b96a-07349687620f. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1420.911614] env[65503]: DEBUG oslo_concurrency.lockutils [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] Acquiring lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.911941] env[65503]: DEBUG oslo_concurrency.lockutils [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] Acquired lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1420.913340] env[65503]: DEBUG nova.network.neutron [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Refreshing network info cache for port 38fa3af1-f940-49c8-b96a-07349687620f {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1421.151736] env[65503]: DEBUG nova.compute.manager [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1421.180017] env[65503]: DEBUG nova.virt.hardware [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1421.180282] env[65503]: DEBUG nova.virt.hardware [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1421.180431] env[65503]: DEBUG nova.virt.hardware [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1421.180606] env[65503]: DEBUG nova.virt.hardware [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1421.180748] env[65503]: DEBUG nova.virt.hardware [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1421.180886] env[65503]: DEBUG nova.virt.hardware [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1421.181102] env[65503]: DEBUG nova.virt.hardware [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1421.181259] env[65503]: DEBUG nova.virt.hardware [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1421.181418] env[65503]: DEBUG nova.virt.hardware [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 
tempest-ServersTestJSON-1867714447-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1421.181571] env[65503]: DEBUG nova.virt.hardware [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1421.181734] env[65503]: DEBUG nova.virt.hardware [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1421.182887] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b179812a-b4f8-454c-9f34-1fbbc3a94a05 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.191284] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c9db43-a5d1-4a20-9c9a-d301dc4e726b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.415283] env[65503]: WARNING neutronclient.v2_0.client [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1421.416081] env[65503]: WARNING openstack [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1421.416472] env[65503]: WARNING openstack [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1421.554152] env[65503]: WARNING openstack [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1421.554551] env[65503]: WARNING openstack [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1421.622282] env[65503]: WARNING neutronclient.v2_0.client [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] The python binding code in 
neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1421.623050] env[65503]: WARNING openstack [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1421.623475] env[65503]: WARNING openstack [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1421.710985] env[65503]: DEBUG nova.network.neutron [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updated VIF entry in instance network info cache for port 38fa3af1-f940-49c8-b96a-07349687620f. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1421.711476] env[65503]: DEBUG nova.network.neutron [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance_info_cache with network_info: [{"id": "38fa3af1-f940-49c8-b96a-07349687620f", "address": "fa:16:3e:55:bb:66", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38fa3af1-f9", "ovs_interfaceid": "38fa3af1-f940-49c8-b96a-07349687620f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1421.803597] env[65503]: DEBUG nova.compute.manager [req-e1fff9e9-4619-4328-8f90-68820fa3aa2d req-b4f2fd5c-3d7f-47e4-98c6-33feddc99e25 service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Received event network-vif-plugged-b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1421.803850] env[65503]: DEBUG oslo_concurrency.lockutils [req-e1fff9e9-4619-4328-8f90-68820fa3aa2d req-b4f2fd5c-3d7f-47e4-98c6-33feddc99e25 service nova] Acquiring lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1421.804080] env[65503]: DEBUG oslo_concurrency.lockutils [req-e1fff9e9-4619-4328-8f90-68820fa3aa2d req-b4f2fd5c-3d7f-47e4-98c6-33feddc99e25 service nova] Lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1421.805063] env[65503]: DEBUG oslo_concurrency.lockutils [req-e1fff9e9-4619-4328-8f90-68820fa3aa2d req-b4f2fd5c-3d7f-47e4-98c6-33feddc99e25 service nova] Lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1421.805063] env[65503]: DEBUG nova.compute.manager [req-e1fff9e9-4619-4328-8f90-68820fa3aa2d req-b4f2fd5c-3d7f-47e4-98c6-33feddc99e25 service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] No waiting events found dispatching network-vif-plugged-b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1421.805063] env[65503]: WARNING nova.compute.manager [req-e1fff9e9-4619-4328-8f90-68820fa3aa2d req-b4f2fd5c-3d7f-47e4-98c6-33feddc99e25 service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Received unexpected event network-vif-plugged-b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea for instance with vm_state building and task_state spawning. [ 1421.880785] env[65503]: DEBUG nova.network.neutron [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Successfully updated port: b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1422.215030] env[65503]: DEBUG oslo_concurrency.lockutils [req-6d36e23f-f35e-4b21-ab2d-859aaf026e48 req-caebc936-a9d6-4ff4-a4cf-4db3f8fefec4 service nova] Releasing lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1422.383713] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "refresh_cache-53e5de8f-1a2a-4b17-a3cd-43888dc70be9" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.383904] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "refresh_cache-53e5de8f-1a2a-4b17-a3cd-43888dc70be9" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1422.384101] env[65503]: DEBUG nova.network.neutron [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1422.887371] env[65503]: WARNING 
openstack [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1422.887899] env[65503]: WARNING openstack [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1422.924015] env[65503]: DEBUG nova.network.neutron [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1422.942848] env[65503]: WARNING openstack [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1422.943255] env[65503]: WARNING openstack [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1423.002675] env[65503]: WARNING neutronclient.v2_0.client [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1423.003496] env[65503]: WARNING openstack [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1423.003871] env[65503]: WARNING openstack [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1423.082081] env[65503]: DEBUG nova.network.neutron [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Updating instance_info_cache with network_info: [{"id": "b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea", "address": "fa:16:3e:d1:85:92", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1f52fdf-8a", "ovs_interfaceid": "b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1423.585556] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "refresh_cache-53e5de8f-1a2a-4b17-a3cd-43888dc70be9" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1423.585957] env[65503]: DEBUG nova.compute.manager [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Instance network_info: |[{"id": "b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea", "address": "fa:16:3e:d1:85:92", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1f52fdf-8a", "ovs_interfaceid": "b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1423.586533] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:85:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1423.594146] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Creating folder: Project (62e7660e16774c408729de84ba5c7534). Parent ref: group-v870190. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1423.594470] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c461a54e-2b1d-4e03-b6ba-3a9a79241f91 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.608476] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Created folder: Project (62e7660e16774c408729de84ba5c7534) in parent group-v870190. [ 1423.608845] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Creating folder: Instances. Parent ref: group-v870529. {{(pid=65503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1423.609179] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5927f7f5-6086-43e4-8c49-c3748c90fad9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.623716] env[65503]: INFO nova.virt.vmwareapi.vm_util [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Created folder: Instances in parent group-v870529. [ 1423.624176] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1423.624441] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1423.624712] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88c664f6-7959-44ca-a8e7-f003fb98aaf8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.647592] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1423.647592] env[65503]: value = "task-4451090" [ 1423.647592] env[65503]: _type = "Task" [ 1423.647592] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.656047] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451090, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.834422] env[65503]: DEBUG nova.compute.manager [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Received event network-changed-b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1423.834851] env[65503]: DEBUG nova.compute.manager [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Refreshing instance network info cache due to event network-changed-b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1423.835118] env[65503]: DEBUG oslo_concurrency.lockutils [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] Acquiring lock "refresh_cache-53e5de8f-1a2a-4b17-a3cd-43888dc70be9" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.835379] env[65503]: DEBUG oslo_concurrency.lockutils [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] Acquired lock "refresh_cache-53e5de8f-1a2a-4b17-a3cd-43888dc70be9" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1423.835546] env[65503]: DEBUG nova.network.neutron [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Refreshing network info cache for port b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1424.157909] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451090, 'name': CreateVM_Task, 'duration_secs': 0.300516} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.158147] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1424.158593] env[65503]: WARNING neutronclient.v2_0.client [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1424.158988] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.159164] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1424.159482] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1424.159735] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0860fd9-5a21-48cd-b1e5-c17c659e2352 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.164711] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1424.164711] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b4426e-30f0-b00f-9bec-fc6561153262" [ 1424.164711] env[65503]: _type = "Task" [ 1424.164711] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.172891] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b4426e-30f0-b00f-9bec-fc6561153262, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.338895] env[65503]: WARNING neutronclient.v2_0.client [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1424.339617] env[65503]: WARNING openstack [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1424.340031] env[65503]: WARNING openstack [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1424.429282] env[65503]: WARNING openstack [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1424.429613] env[65503]: WARNING openstack [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1424.487087] env[65503]: WARNING neutronclient.v2_0.client [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1424.487763] env[65503]: WARNING openstack [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1424.488114] env[65503]: WARNING openstack [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1424.567375] env[65503]: DEBUG nova.network.neutron [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Updated VIF entry in instance network info cache for port b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1424.567726] env[65503]: DEBUG nova.network.neutron [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Updating instance_info_cache with network_info: [{"id": "b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea", "address": "fa:16:3e:d1:85:92", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1f52fdf-8a", "ovs_interfaceid": "b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1424.675924] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b4426e-30f0-b00f-9bec-fc6561153262, 'name': SearchDatastore_Task, 'duration_secs': 0.010441} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.676271] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1424.676500] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1424.676738] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.676880] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1424.677065] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1424.677335] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad7958ae-7676-48ae-86b9-a9ec302f06c0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.687016] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1424.687200] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1424.687945] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97fe06b2-c3b6-4474-b4c4-e6796bead552 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.694406] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1424.694406] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5262024a-bd59-6ee2-3acb-ce5f9717c544" [ 1424.694406] env[65503]: _type = "Task" [ 1424.694406] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.703813] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5262024a-bd59-6ee2-3acb-ce5f9717c544, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.070959] env[65503]: DEBUG oslo_concurrency.lockutils [req-d424d693-c300-4b46-b2a8-d1c504129caf req-918c3b01-86bb-4b06-9513-d81b954194eb service nova] Releasing lock "refresh_cache-53e5de8f-1a2a-4b17-a3cd-43888dc70be9" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1425.206989] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5262024a-bd59-6ee2-3acb-ce5f9717c544, 'name': SearchDatastore_Task, 'duration_secs': 0.010609} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.207828] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c557b491-0882-44b4-9f26-1d77962ba3d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.214174] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1425.214174] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b3b60e-4a13-4647-1a08-ea7067069501" [ 1425.214174] env[65503]: _type = "Task" [ 1425.214174] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.223403] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b3b60e-4a13-4647-1a08-ea7067069501, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.725017] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52b3b60e-4a13-4647-1a08-ea7067069501, 'name': SearchDatastore_Task, 'duration_secs': 0.009988} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.725420] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1425.725551] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 53e5de8f-1a2a-4b17-a3cd-43888dc70be9/53e5de8f-1a2a-4b17-a3cd-43888dc70be9.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1425.725825] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4dd2f019-c894-4f48-9550-e92012cd1588 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.734667] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1425.734667] env[65503]: value = "task-4451091" [ 1425.734667] env[65503]: _type = "Task" [ 1425.734667] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.743681] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451091, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.245655] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451091, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.746734] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451091, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531466} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.747164] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 53e5de8f-1a2a-4b17-a3cd-43888dc70be9/53e5de8f-1a2a-4b17-a3cd-43888dc70be9.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1426.747252] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1426.747519] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6394c2a0-57ea-4107-8133-85b475065f35 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.758725] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1426.758725] env[65503]: value = "task-4451092" [ 1426.758725] env[65503]: _type = "Task" [ 1426.758725] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.768389] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451092, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.269103] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451092, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.198874} completed successfully. 
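The "Extending root virtual disk to 1048576" record above is consistent with the 1 GB root disk of the m1.nano flavor (root_gb=1, logged further below) if the extend size is expressed in KiB. A one-line check of that arithmetic:

# 1 GB root disk expressed in KiB matches the value in the extend record above.
root_gb = 1                    # m1.nano root_gb, as logged further below
size_in_kb = root_gb * 1024 * 1024
assert size_in_kb == 1048576   # value seen in "Extending root virtual disk to 1048576"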
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.269424] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1427.270414] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63beb2da-5462-4650-be2e-e66c30c82aec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.294277] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 53e5de8f-1a2a-4b17-a3cd-43888dc70be9/53e5de8f-1a2a-4b17-a3cd-43888dc70be9.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1427.294632] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad2e1454-ed78-47de-9d29-f03f316a752d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.315069] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1427.315069] env[65503]: value = "task-4451093" [ 1427.315069] env[65503]: _type = "Task" [ 1427.315069] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.324178] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451093, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.825578] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451093, 'name': ReconfigVM_Task, 'duration_secs': 0.26991} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.825951] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 53e5de8f-1a2a-4b17-a3cd-43888dc70be9/53e5de8f-1a2a-4b17-a3cd-43888dc70be9.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1427.826457] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-834f48d8-b13f-45f9-b4eb-d9a8d80a3831 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.833761] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1427.833761] env[65503]: value = "task-4451094" [ 1427.833761] env[65503]: _type = "Task" [ 1427.833761] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.842561] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451094, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.346091] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451094, 'name': Rename_Task, 'duration_secs': 0.156021} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.346396] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1428.346654] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfa6ba13-8b97-43bd-8033-1e9c3a048e33 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.354521] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1428.354521] env[65503]: value = "task-4451095" [ 1428.354521] env[65503]: _type = "Task" [ 1428.354521] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.363525] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451095, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.869974] env[65503]: DEBUG oslo_vmware.api [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451095, 'name': PowerOnVM_Task, 'duration_secs': 0.478988} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.870444] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1428.870618] env[65503]: INFO nova.compute.manager [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Took 7.72 seconds to spawn the instance on the hypervisor. [ 1428.870897] env[65503]: DEBUG nova.compute.manager [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1428.872106] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358bd56e-8b94-4d73-93ba-589a52b57185 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.392930] env[65503]: INFO nova.compute.manager [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Took 12.43 seconds to build instance. 
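The two INFO records above report elapsed spawn and build times ("Took 7.72 seconds to spawn ...", "Took 12.43 seconds to build ..."). A generic sketch of producing that style of message around a unit of work, purely illustrative and not Nova's timing code:

# Minimal sketch of "Took N.NN seconds to ..." style reporting.
import contextlib
import time


@contextlib.contextmanager
def report_duration(what):
    start = time.monotonic()
    try:
        yield
    finally:
        print("Took %.2f seconds to %s." % (time.monotonic() - start, what))


with report_duration("spawn the instance on the hypervisor"):
    time.sleep(0.1)  # stand-in for the actual spawn work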
[ 1429.895069] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c8188226-32cb-4ab7-bd64-3bffd9586de7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.935s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1431.299679] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.300053] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.802613] env[65503]: DEBUG nova.compute.manager [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1432.324511] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1432.324774] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1432.326285] env[65503]: INFO nova.compute.claims [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1433.386553] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f026de4-864f-4183-b554-77a12780aa6c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.394816] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b60269-f7c4-4e01-ad75-868ccb3e0ddf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.425814] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-31eb349b-2985-452e-90ed-7f02e3f43b88 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.433876] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d947eef7-dbe2-4c1f-80e7-a0ab60baebf5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.447631] env[65503]: DEBUG nova.compute.provider_tree [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.951305] env[65503]: DEBUG nova.scheduler.client.report [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1434.324339] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.324530] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.456878] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.132s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.457450] env[65503]: DEBUG nova.compute.manager [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1434.961968] env[65503]: DEBUG nova.compute.utils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1434.963427] env[65503]: DEBUG nova.compute.manager [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Allocating IP information in the background. 
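The inventory reported for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 above translates into schedulable capacity with the usual Placement-style formula (total - reserved) * allocation_ratio. The numbers below are copied from that record; max_unit (16 VCPU, 65530 MB, 96 GB here) additionally caps what a single allocation may request.

# Capacity derived from the inventory record above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0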
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1434.963624] env[65503]: DEBUG nova.network.neutron [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1434.963948] env[65503]: WARNING neutronclient.v2_0.client [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1434.964283] env[65503]: WARNING neutronclient.v2_0.client [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1434.964853] env[65503]: WARNING openstack [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1434.965205] env[65503]: WARNING openstack [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1434.972992] env[65503]: DEBUG nova.compute.manager [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1435.027881] env[65503]: DEBUG nova.policy [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3299e99bda746c89cd71759d037fd52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62e7660e16774c408729de84ba5c7534', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1435.323769] env[65503]: DEBUG nova.network.neutron [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Successfully created port: 5e90cf6e-0330-4a1c-8c87-4e3da81c88c0 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1435.326364] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1435.327265] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1435.327347] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Cleaning up deleted instances with incomplete migration {{(pid=65503) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11954}} [ 1435.982245] env[65503]: DEBUG nova.compute.manager [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1436.016094] env[65503]: DEBUG nova.virt.hardware [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1436.016389] env[65503]: DEBUG nova.virt.hardware [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1436.016498] env[65503]: DEBUG nova.virt.hardware [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1436.016675] env[65503]: DEBUG nova.virt.hardware [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1436.016814] env[65503]: DEBUG nova.virt.hardware [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1436.016956] env[65503]: DEBUG nova.virt.hardware [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1436.017604] env[65503]: DEBUG nova.virt.hardware [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1436.017604] env[65503]: DEBUG nova.virt.hardware [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1436.017764] env[65503]: DEBUG nova.virt.hardware [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 
tempest-ServersTestJSON-1867714447-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1436.017927] env[65503]: DEBUG nova.virt.hardware [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1436.018327] env[65503]: DEBUG nova.virt.hardware [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1436.019061] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62918b14-96e4-481e-9d8f-e60a38615169 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.028444] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef95a72-0468-4eb3-8055-cb2323e8e296 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.750596] env[65503]: DEBUG nova.compute.manager [req-356f01e4-83bc-491e-ba98-2e6a7ccae0bc req-cf264456-4ec9-48e9-a6f0-dbebbb1d22b4 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Received event network-vif-plugged-5e90cf6e-0330-4a1c-8c87-4e3da81c88c0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1436.750766] env[65503]: DEBUG oslo_concurrency.lockutils [req-356f01e4-83bc-491e-ba98-2e6a7ccae0bc req-cf264456-4ec9-48e9-a6f0-dbebbb1d22b4 service nova] Acquiring lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1436.750999] env[65503]: DEBUG oslo_concurrency.lockutils [req-356f01e4-83bc-491e-ba98-2e6a7ccae0bc req-cf264456-4ec9-48e9-a6f0-dbebbb1d22b4 service nova] Lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1436.751231] env[65503]: DEBUG oslo_concurrency.lockutils [req-356f01e4-83bc-491e-ba98-2e6a7ccae0bc req-cf264456-4ec9-48e9-a6f0-dbebbb1d22b4 service nova] Lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1436.751397] env[65503]: DEBUG nova.compute.manager [req-356f01e4-83bc-491e-ba98-2e6a7ccae0bc req-cf264456-4ec9-48e9-a6f0-dbebbb1d22b4 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] No waiting events found dispatching network-vif-plugged-5e90cf6e-0330-4a1c-8c87-4e3da81c88c0 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1436.751556] env[65503]: WARNING nova.compute.manager [req-356f01e4-83bc-491e-ba98-2e6a7ccae0bc req-cf264456-4ec9-48e9-a6f0-dbebbb1d22b4 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] 
Received unexpected event network-vif-plugged-5e90cf6e-0330-4a1c-8c87-4e3da81c88c0 for instance with vm_state building and task_state spawning. [ 1436.831663] env[65503]: DEBUG nova.network.neutron [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Successfully updated port: 5e90cf6e-0330-4a1c-8c87-4e3da81c88c0 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1437.333929] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "refresh_cache-5643f8b3-249f-45c1-a15c-ee3243ae3b9a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.334162] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "refresh_cache-5643f8b3-249f-45c1-a15c-ee3243ae3b9a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1437.334369] env[65503]: DEBUG nova.network.neutron [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1437.826902] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1437.827139] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1437.837225] env[65503]: WARNING openstack [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1437.837594] env[65503]: WARNING openstack [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1437.873183] env[65503]: DEBUG nova.network.neutron [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Instance cache missing network info. 
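The hardware records a little further up show the CPU topology selection for the 1-vCPU m1.nano flavor: with flavor and image limits unset, the effective limits default to 65536 sockets/cores/threads and the only candidate is 1:1:1. A simplified sketch of that enumeration (an illustration, not nova.virt.hardware):

# Enumerate (sockets, cores, threads) factorizations of the vCPU count that
# respect the limits, mirroring "Got 1 possible topologies" for 1 vCPU.
import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
            topologies.append((s, c, t))
    return topologies


print(possible_topologies(1))   # [(1, 1, 1)]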
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1437.892229] env[65503]: WARNING openstack [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1437.892609] env[65503]: WARNING openstack [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1437.951103] env[65503]: WARNING neutronclient.v2_0.client [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1437.952101] env[65503]: WARNING openstack [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1437.952501] env[65503]: WARNING openstack [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1438.032695] env[65503]: DEBUG nova.network.neutron [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Updating instance_info_cache with network_info: [{"id": "5e90cf6e-0330-4a1c-8c87-4e3da81c88c0", "address": "fa:16:3e:2b:fc:4e", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e90cf6e-03", "ovs_interfaceid": "5e90cf6e-0330-4a1c-8c87-4e3da81c88c0", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1438.536161] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "refresh_cache-5643f8b3-249f-45c1-a15c-ee3243ae3b9a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1438.536547] env[65503]: DEBUG nova.compute.manager [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Instance network_info: |[{"id": "5e90cf6e-0330-4a1c-8c87-4e3da81c88c0", "address": "fa:16:3e:2b:fc:4e", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e90cf6e-03", "ovs_interfaceid": "5e90cf6e-0330-4a1c-8c87-4e3da81c88c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1438.537052] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:fc:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e90cf6e-0330-4a1c-8c87-4e3da81c88c0', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1438.544672] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1438.544897] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1438.545141] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c121b862-b258-48c7-ac98-ada367065969 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.567020] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1438.567020] env[65503]: value = "task-4451096" [ 1438.567020] env[65503]: _type = "Task" [ 1438.567020] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.575036] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451096, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.781660] env[65503]: DEBUG nova.compute.manager [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Received event network-changed-5e90cf6e-0330-4a1c-8c87-4e3da81c88c0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1438.781869] env[65503]: DEBUG nova.compute.manager [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Refreshing instance network info cache due to event network-changed-5e90cf6e-0330-4a1c-8c87-4e3da81c88c0. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1438.782105] env[65503]: DEBUG oslo_concurrency.lockutils [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] Acquiring lock "refresh_cache-5643f8b3-249f-45c1-a15c-ee3243ae3b9a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.782241] env[65503]: DEBUG oslo_concurrency.lockutils [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] Acquired lock "refresh_cache-5643f8b3-249f-45c1-a15c-ee3243ae3b9a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1438.782393] env[65503]: DEBUG nova.network.neutron [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Refreshing network info cache for port 5e90cf6e-0330-4a1c-8c87-4e3da81c88c0 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1439.076887] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451096, 'name': CreateVM_Task, 'duration_secs': 0.313905} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.077085] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1439.077583] env[65503]: WARNING neutronclient.v2_0.client [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1439.077938] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.078099] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1439.078412] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1439.078665] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2065363-a701-4e01-b075-fe4f7d0d35e2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.083595] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1439.083595] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fde643-3152-6a0c-b045-c090358feed9" [ 1439.083595] env[65503]: _type = "Task" [ 1439.083595] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.091613] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fde643-3152-6a0c-b045-c090358feed9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.286044] env[65503]: WARNING neutronclient.v2_0.client [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
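The lock names above ("[datastore1] devstack-image-cache_base/d68ffece-...") suggest per-image serialization around the image-cache check, so only one request processes a given cached image at a time. A simplified illustration of that pattern follows; image_in_cache and populate_cache are hypothetical hooks, and this is not Nova's implementation.

# One lock per cached image, with a check-then-populate step under it.
import threading
from collections import defaultdict

_image_locks = defaultdict(threading.Lock)


def ensure_cached(datastore, image_id, image_in_cache, populate_cache):
    lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
    with _image_locks[lock_name]:
        if not image_in_cache(datastore, image_id):
            populate_cache(datastore, image_id)
    return lock_name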
[ 1439.286470] env[65503]: WARNING openstack [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1439.286860] env[65503]: WARNING openstack [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1439.318302] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.373251] env[65503]: WARNING openstack [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1439.373655] env[65503]: WARNING openstack [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1439.428183] env[65503]: WARNING neutronclient.v2_0.client [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1439.428857] env[65503]: WARNING openstack [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1439.429227] env[65503]: WARNING openstack [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1439.506382] env[65503]: DEBUG nova.network.neutron [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Updated VIF entry in instance network info cache for port 5e90cf6e-0330-4a1c-8c87-4e3da81c88c0. 
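The network-changed event above triggers a refresh of the instance's cached network info, ending in "Updated VIF entry in instance network info cache for port ...". A minimal sketch of that cache update (replace the entry with the matching port id, illustrative only):

# Replace the cached VIF entry whose id matches the refreshed port data.
def update_vif_entry(network_info, refreshed_vif):
    for i, vif in enumerate(network_info):
        if vif.get("id") == refreshed_vif["id"]:
            network_info[i] = refreshed_vif
            return True
    network_info.append(refreshed_vif)   # port not cached yet
    return False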
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1439.506724] env[65503]: DEBUG nova.network.neutron [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Updating instance_info_cache with network_info: [{"id": "5e90cf6e-0330-4a1c-8c87-4e3da81c88c0", "address": "fa:16:3e:2b:fc:4e", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e90cf6e-03", "ovs_interfaceid": "5e90cf6e-0330-4a1c-8c87-4e3da81c88c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1439.593954] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52fde643-3152-6a0c-b045-c090358feed9, 'name': SearchDatastore_Task, 'duration_secs': 0.011082} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.594375] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1439.594495] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1439.594727] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.594876] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1439.595060] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1439.595342] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae08388e-4406-47b9-ba65-db0f0d33bd5b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.605715] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1439.605916] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Folder [datastore1] devstack-image-cache_base created. 
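The MakeDirectory records above ("Creating directory with path [datastore1] devstack-image-cache_base" / "Folder ... created") reflect a create-folder-if-missing step. A loose sketch of that idea, written against a hypothetical make_directory callable that raises FileExistsError when the path already exists; this is an analogy, not the vmwareapi driver code.

# Idempotent "create folder if missing" against a hypothetical make_directory().
def create_folder_if_missing(make_directory, datastore, folder):
    path = "[%s] %s" % (datastore, folder)
    try:
        make_directory(path, create_parents=True)
        print("Created directory with path %s" % path)
    except FileExistsError:
        # Assumed benign: the folder is already present.
        print("Folder %s already exists" % path)
    return path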
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1439.606738] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ceb618d-5be2-4154-ad2c-41a1e1992a4b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.613268] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1439.613268] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52de8817-3aae-ef94-35c7-e9cf69a53fe8" [ 1439.613268] env[65503]: _type = "Task" [ 1439.613268] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.622874] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52de8817-3aae-ef94-35c7-e9cf69a53fe8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.009938] env[65503]: DEBUG oslo_concurrency.lockutils [req-040229a5-f814-424c-9b77-62dd4eb66d39 req-084ebb48-b9b5-47bd-9e06-26fa9fc00405 service nova] Releasing lock "refresh_cache-5643f8b3-249f-45c1-a15c-ee3243ae3b9a" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1440.123774] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52de8817-3aae-ef94-35c7-e9cf69a53fe8, 'name': SearchDatastore_Task, 'duration_secs': 0.009349} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.124541] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1db58de2-c27a-4e34-bd20-9b1b9c496023 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.129783] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1440.129783] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5215e027-f95e-f542-6163-44d8eb005e42" [ 1440.129783] env[65503]: _type = "Task" [ 1440.129783] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.137252] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5215e027-f95e-f542-6163-44d8eb005e42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.324259] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1440.640815] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5215e027-f95e-f542-6163-44d8eb005e42, 'name': SearchDatastore_Task, 'duration_secs': 0.009853} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.641177] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1440.641403] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 5643f8b3-249f-45c1-a15c-ee3243ae3b9a/5643f8b3-249f-45c1-a15c-ee3243ae3b9a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1440.641683] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b821c0ca-a8a4-473e-b2a7-9de642da071c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.651467] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1440.651467] env[65503]: value = "task-4451097" [ 1440.651467] env[65503]: _type = "Task" [ 1440.651467] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.660196] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451097, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.827680] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1440.827941] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1440.828129] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1440.828286] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1440.829493] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8180908-7912-41ad-90f2-7a1e7a0abcb9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.838410] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e7fa2e-2d23-4c2a-9fe5-251966408bb0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.854598] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a52091-7ea4-4f23-aee4-c24529dc8743 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.862986] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9745b6-b8d3-4e64-8d8f-68a0c91135e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.895285] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180614MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1440.895469] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1440.895604] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1441.165071] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451097, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453789} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.165320] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 5643f8b3-249f-45c1-a15c-ee3243ae3b9a/5643f8b3-249f-45c1-a15c-ee3243ae3b9a.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1441.165530] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1441.165800] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c2fabde0-deb6-4ec7-8dd4-1b1b091626d3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.172250] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1441.172250] env[65503]: value = "task-4451098" [ 1441.172250] env[65503]: _type = "Task" [ 1441.172250] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.180946] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451098, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.681660] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451098, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071254} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.682029] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1441.682691] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a618e8-f44b-4a7e-becc-507fb06e3032 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.704754] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 5643f8b3-249f-45c1-a15c-ee3243ae3b9a/5643f8b3-249f-45c1-a15c-ee3243ae3b9a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1441.705028] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5826ff6a-a5cb-47ce-b686-188856f36cbd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.725956] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1441.725956] env[65503]: value = "task-4451099" [ 1441.725956] env[65503]: _type = "Task" [ 1441.725956] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.734710] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451099, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.958738] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 3b7a6813-c1fa-4f51-ae17-cc147a8c809d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1441.959012] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 53e5de8f-1a2a-4b17-a3cd-43888dc70be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1441.959227] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 5643f8b3-249f-45c1-a15c-ee3243ae3b9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1441.959553] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1441.959761] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=100GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '3', 'num_vm_active': '2', 'num_task_None': '2', 'num_os_type_None': '3', 'num_proj_93906c603f7a4b18a34fc4b42fb6d6c1': '1', 'io_workload': '1', 'num_proj_62e7660e16774c408729de84ba5c7534': '2', 'num_vm_building': '1', 'num_task_spawning': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1442.011891] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50e0f88-0a36-4a19-affc-e255c716a823 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.019968] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b591cd-877d-4772-8bf0-bd351d601b51 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.051218] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8660bb40-4d38-4918-ba38-cc6d3b2e256c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.059667] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22bb626-77ac-426d-b540-634ae2fe5c42 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.073524] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1442.236998] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451099, 'name': ReconfigVM_Task, 'duration_secs': 0.285087} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.237293] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 5643f8b3-249f-45c1-a15c-ee3243ae3b9a/5643f8b3-249f-45c1-a15c-ee3243ae3b9a.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1442.237920] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-666210d0-1751-4f57-8eb6-eb6a32df1766 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.245171] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1442.245171] env[65503]: value = "task-4451100" [ 1442.245171] env[65503]: _type = "Task" [ 1442.245171] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.253793] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451100, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.577106] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1442.756081] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451100, 'name': Rename_Task, 'duration_secs': 0.277987} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.756469] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1442.756619] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48ee9a26-3c79-4217-8bdb-71659f3ec89f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.763316] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1442.763316] env[65503]: value = "task-4451101" [ 1442.763316] env[65503]: _type = "Task" [ 1442.763316] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.771408] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451101, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.082170] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1443.082533] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.187s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1443.082910] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.083160] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Cleaning up deleted instances {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11916}} [ 1443.273988] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451101, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.592063] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] There are 21 instances to clean {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11925}} [ 1443.592063] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 261f8d40-439d-4469-b132-cf0448841057] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1443.775232] env[65503]: DEBUG oslo_vmware.api [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451101, 'name': PowerOnVM_Task, 'duration_secs': 0.631433} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.775630] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1443.775770] env[65503]: INFO nova.compute.manager [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Took 7.79 seconds to spawn the instance on the hypervisor. [ 1443.775878] env[65503]: DEBUG nova.compute.manager [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1443.776682] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99903c3a-d6f6-4811-90b3-ddb5bd44304f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.095943] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 5032f46e-8185-4181-a2d6-b359abddd1ad] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1444.295968] env[65503]: INFO nova.compute.manager [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Took 11.99 seconds to build instance. 
[ 1444.599627] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: c621ac90-4619-4e67-9494-a8817744a4be] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1444.798178] env[65503]: DEBUG oslo_concurrency.lockutils [None req-841504a2-e015-4618-a2b0-f2001f2f056d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.498s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1445.104852] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 18508c5b-2830-41fd-b92c-675a6d04e6be] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1445.610178] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 5babb2a4-c9a1-412c-8fd2-91880037d119] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1445.612646] env[65503]: DEBUG oslo_concurrency.lockutils [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1445.612878] env[65503]: DEBUG oslo_concurrency.lockutils [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1445.613082] env[65503]: DEBUG oslo_concurrency.lockutils [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1445.613265] env[65503]: DEBUG oslo_concurrency.lockutils [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1445.613427] env[65503]: DEBUG oslo_concurrency.lockutils [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1445.616666] env[65503]: INFO nova.compute.manager [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Terminating instance [ 1446.116451] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: a1247f14-ebd5-4097-9532-91ddbc9ff8af] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1446.120306] env[65503]: DEBUG nova.compute.manager [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1446.120497] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1446.121362] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6d6fb5-612c-4e4b-9488-0627f89cafc3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.130901] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1446.131278] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69b2a80c-2d24-4439-af5b-cbf1ca4e2d49 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.139969] env[65503]: DEBUG oslo_vmware.api [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1446.139969] env[65503]: value = "task-4451102" [ 1446.139969] env[65503]: _type = "Task" [ 1446.139969] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.154137] env[65503]: DEBUG oslo_vmware.api [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451102, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.620127] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 8d4ff5ca-12c6-488a-9a23-495d24e012e7] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1446.651231] env[65503]: DEBUG oslo_vmware.api [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451102, 'name': PowerOffVM_Task, 'duration_secs': 0.202264} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.651613] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1446.651798] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1446.652061] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1043ccc2-99e6-4040-a7aa-34d2f6d54ae9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.714793] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1446.715018] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1446.715213] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleting the datastore file [datastore1] 5643f8b3-249f-45c1-a15c-ee3243ae3b9a {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1446.715483] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1470643-9805-4311-9fb9-3786dc6cca05 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.723168] env[65503]: DEBUG oslo_vmware.api [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1446.723168] env[65503]: value = "task-4451104" [ 1446.723168] env[65503]: _type = "Task" [ 1446.723168] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.731609] env[65503]: DEBUG oslo_vmware.api [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451104, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.123382] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 6d11d79b-b11f-4a31-a4e3-aa5b3346ae17] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1447.233335] env[65503]: DEBUG oslo_vmware.api [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451104, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166178} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.233596] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1447.233775] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1447.233947] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1447.234136] env[65503]: INFO nova.compute.manager [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1447.234385] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1447.234580] env[65503]: DEBUG nova.compute.manager [-] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1447.234674] env[65503]: DEBUG nova.network.neutron [-] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1447.234913] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1447.235461] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1447.235709] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1447.272389] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1447.534776] env[65503]: DEBUG nova.compute.manager [req-c0a35b99-800b-42f3-b309-1daf7215808b req-357cbdb5-9ee9-4422-a538-afad22bd8a66 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Received event network-vif-deleted-5e90cf6e-0330-4a1c-8c87-4e3da81c88c0 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1447.534981] env[65503]: INFO nova.compute.manager [req-c0a35b99-800b-42f3-b309-1daf7215808b req-357cbdb5-9ee9-4422-a538-afad22bd8a66 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Neutron deleted interface 5e90cf6e-0330-4a1c-8c87-4e3da81c88c0; detaching it from the instance and deleting it from the info cache [ 1447.535162] env[65503]: DEBUG nova.network.neutron [req-c0a35b99-800b-42f3-b309-1daf7215808b req-357cbdb5-9ee9-4422-a538-afad22bd8a66 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1447.627219] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: fb2dddac-4ac0-498a-b972-e61255833ad0] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1448.010899] env[65503]: DEBUG nova.network.neutron [-] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1448.038221] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05230220-1095-4aab-bbd3-e4ca396e72a6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.048574] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d425cf-07e5-447a-a92a-d1143637ddb8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.075443] env[65503]: DEBUG nova.compute.manager [req-c0a35b99-800b-42f3-b309-1daf7215808b req-357cbdb5-9ee9-4422-a538-afad22bd8a66 service nova] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Detach interface failed, port_id=5e90cf6e-0330-4a1c-8c87-4e3da81c88c0, reason: Instance 5643f8b3-249f-45c1-a15c-ee3243ae3b9a could not be found. 
{{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1448.130531] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 6f9a7c04-78dd-46b2-a42d-6ff218478f19] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1448.513880] env[65503]: INFO nova.compute.manager [-] [instance: 5643f8b3-249f-45c1-a15c-ee3243ae3b9a] Took 1.28 seconds to deallocate network for instance. [ 1448.634478] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 0b5ee1d9-d386-4dbd-90d5-e3a01d7030a7] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1449.021145] env[65503]: DEBUG oslo_concurrency.lockutils [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1449.021471] env[65503]: DEBUG oslo_concurrency.lockutils [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1449.021693] env[65503]: DEBUG nova.objects.instance [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lazy-loading 'resources' on Instance uuid 5643f8b3-249f-45c1-a15c-ee3243ae3b9a {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1449.137853] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: bc0dad37-fc1d-4edc-8680-dba294dd724e] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1449.583879] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06194544-1935-414a-b257-98bf53d35f24 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.592724] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5054d7-51a0-488a-b393-1fd3f94ae7db {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.624125] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89b3d90-110f-45d3-b7a1-f954382c51e2 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.632247] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2072603c-b74b-4fdc-bd2a-c8144a786911 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.646974] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: afceb6e3-1a1e-4fc8-91ff-9cb66c8a79ad] Instance has had 0 of 5 cleanup 
attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1449.649229] env[65503]: DEBUG nova.compute.provider_tree [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1450.152116] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 7b98ff40-1580-4175-adc5-66ca8977990a] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1450.154552] env[65503]: DEBUG nova.scheduler.client.report [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1450.659709] env[65503]: DEBUG oslo_concurrency.lockutils [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.638s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1450.662230] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: bc700871-233c-4aa0-867e-4f166b6f54d1] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1450.685207] env[65503]: INFO nova.scheduler.client.report [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted allocations for instance 5643f8b3-249f-45c1-a15c-ee3243ae3b9a [ 1451.165987] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: 2caf0a56-ab65-41c9-b7fb-aca29bc7fb1a] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1451.193302] env[65503]: DEBUG oslo_concurrency.lockutils [None req-56a1deaf-82be-4fb6-8a10-2a70675138f3 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "5643f8b3-249f-45c1-a15c-ee3243ae3b9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.580s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1451.669132] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: b7f55645-f152-4fc9-a962-393f9a9b9c55] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1452.172506] env[65503]: DEBUG nova.compute.manager [None 
req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: d3ca90c9-3dfa-47a5-b48b-67a45ea26021] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1452.675558] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: b00a98a4-4865-4a02-a353-3d1da9ef0e51] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1452.698671] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "21882f8d-89bd-4291-a33a-b18b6dfc5548" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1452.698894] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "21882f8d-89bd-4291-a33a-b18b6dfc5548" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1453.178598] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: ecb5735d-2fa3-4aa4-95cc-dbf5e7eef046] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1453.201303] env[65503]: DEBUG nova.compute.manager [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1453.682204] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] [instance: e37758cc-7287-4271-ad47-d711201d0add] Instance has had 0 of 5 cleanup attempts {{(pid=65503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1453.725354] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1453.725748] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1453.728395] env[65503]: INFO nova.compute.claims [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1454.185397] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1454.790831] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a25bb34-e5b6-46b6-8ffa-3d36b62979d1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.799049] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60605cb2-2ba5-4dae-b72d-9479a962a681 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.829092] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9898444-14c5-4c44-be90-8c6f5cda7d33 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.837975] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a3814e-56f9-4819-b69a-c85c5bfa948c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.853174] env[65503]: DEBUG nova.compute.provider_tree [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1455.357031] env[65503]: DEBUG nova.scheduler.client.report [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1455.691294] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.693265] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.863598] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.138s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1455.864487] env[65503]: DEBUG nova.compute.manager [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1456.372033] env[65503]: DEBUG nova.compute.utils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1456.373856] env[65503]: DEBUG nova.compute.manager [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1456.374086] env[65503]: DEBUG nova.network.neutron [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1456.374472] env[65503]: WARNING neutronclient.v2_0.client [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1456.374785] env[65503]: WARNING neutronclient.v2_0.client [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1456.375395] env[65503]: WARNING openstack [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1456.375713] env[65503]: WARNING openstack [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1456.422185] env[65503]: DEBUG nova.policy [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3299e99bda746c89cd71759d037fd52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62e7660e16774c408729de84ba5c7534', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1456.738720] env[65503]: DEBUG nova.network.neutron [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Successfully created port: cae0dc26-fecc-453d-bc7e-f58104bfe623 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1456.884136] env[65503]: DEBUG nova.compute.manager [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1456.900639] env[65503]: DEBUG nova.compute.manager [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Stashing vm_state: active {{(pid=65503) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1457.421796] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1457.422099] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1457.894281] env[65503]: DEBUG nova.compute.manager [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1457.921117] env[65503]: DEBUG nova.virt.hardware [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1457.921390] env[65503]: DEBUG nova.virt.hardware [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1457.921553] env[65503]: DEBUG nova.virt.hardware [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1457.921836] env[65503]: DEBUG nova.virt.hardware [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1457.922034] env[65503]: DEBUG nova.virt.hardware [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1457.922201] env[65503]: DEBUG nova.virt.hardware [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1457.922405] env[65503]: DEBUG nova.virt.hardware [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1457.922555] env[65503]: DEBUG nova.virt.hardware [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1457.922736] env[65503]: DEBUG nova.virt.hardware [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1457.922908] env[65503]: DEBUG nova.virt.hardware [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1457.923095] env[65503]: DEBUG nova.virt.hardware [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1457.924025] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1d67ff-701e-4093-beba-f909bd9e0f8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.928661] env[65503]: INFO nova.compute.claims [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1457.938159] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9532b2b1-3b4a-4652-bf6d-3c0a3defe5eb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.129166] env[65503]: DEBUG nova.compute.manager [req-a4e42d84-3506-40b0-962b-baf4065ffaab req-44c8ba9f-ff6f-43c0-8791-1131415c8e98 service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Received event network-vif-plugged-cae0dc26-fecc-453d-bc7e-f58104bfe623 {{(pid=65503) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1458.129427] env[65503]: DEBUG oslo_concurrency.lockutils [req-a4e42d84-3506-40b0-962b-baf4065ffaab req-44c8ba9f-ff6f-43c0-8791-1131415c8e98 service nova] Acquiring lock "21882f8d-89bd-4291-a33a-b18b6dfc5548-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1458.129592] env[65503]: DEBUG oslo_concurrency.lockutils [req-a4e42d84-3506-40b0-962b-baf4065ffaab req-44c8ba9f-ff6f-43c0-8791-1131415c8e98 service nova] Lock "21882f8d-89bd-4291-a33a-b18b6dfc5548-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1458.129752] env[65503]: DEBUG oslo_concurrency.lockutils [req-a4e42d84-3506-40b0-962b-baf4065ffaab req-44c8ba9f-ff6f-43c0-8791-1131415c8e98 service nova] Lock "21882f8d-89bd-4291-a33a-b18b6dfc5548-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1458.129912] env[65503]: DEBUG nova.compute.manager [req-a4e42d84-3506-40b0-962b-baf4065ffaab req-44c8ba9f-ff6f-43c0-8791-1131415c8e98 service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] No waiting events found dispatching network-vif-plugged-cae0dc26-fecc-453d-bc7e-f58104bfe623 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1458.130544] env[65503]: WARNING nova.compute.manager [req-a4e42d84-3506-40b0-962b-baf4065ffaab req-44c8ba9f-ff6f-43c0-8791-1131415c8e98 service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Received unexpected event network-vif-plugged-cae0dc26-fecc-453d-bc7e-f58104bfe623 for instance with vm_state building and task_state spawning. 
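The sequence above ("Received event network-vif-plugged-... / No waiting events found dispatching ... / Received unexpected event ...") is the compute manager's external-event dispatch: Neutron reported the VIF as plugged before the driver had registered a waiter for that event, so the event is logged as unexpected and dropped instead of waking a waiter. A minimal sketch of that waiter/dispatch pattern, using threading.Event purely for illustration (Nova's InstanceEvents implementation differs):

    # Illustrative sketch of the pattern behind "No waiting events found" /
    # "Received unexpected event". Not Nova's InstanceEvents code.
    import threading

    class EventWaiters:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev            # the spawning thread blocks on ev.wait(timeout=...)

        def dispatch(self, instance_uuid, event_name):
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                # no one registered a waiter: log and drop, as in the WARNING above
                print('Received unexpected event %s for instance %s'
                      % (event_name, instance_uuid))
            else:
                ev.set()         # wakes the waiting spawn path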
[ 1458.229432] env[65503]: DEBUG nova.network.neutron [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Successfully updated port: cae0dc26-fecc-453d-bc7e-f58104bfe623 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1458.435556] env[65503]: INFO nova.compute.resource_tracker [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating resource usage from migration f3dbbaaf-3ce5-4f0a-82d2-606eb3b3257e [ 1458.506506] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f63f8a5-a08b-4276-9256-67beb506943a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.514897] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b062ce3-8738-40ac-a801-d3328b90dc75 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.547015] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1667b1-1a67-405d-874c-3e4a2b2d130a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.554746] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7011796-b440-42cc-b5d4-142f91d2a473 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.568176] env[65503]: DEBUG nova.compute.provider_tree [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1458.732582] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "refresh_cache-21882f8d-89bd-4291-a33a-b18b6dfc5548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.732795] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "refresh_cache-21882f8d-89bd-4291-a33a-b18b6dfc5548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1458.732886] env[65503]: DEBUG nova.network.neutron [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1459.071598] env[65503]: DEBUG nova.scheduler.client.report [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1459.236422] env[65503]: WARNING openstack [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1459.236836] env[65503]: WARNING openstack [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1459.273045] env[65503]: DEBUG nova.network.neutron [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1459.293032] env[65503]: WARNING openstack [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1459.293446] env[65503]: WARNING openstack [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1459.356471] env[65503]: WARNING neutronclient.v2_0.client [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
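The recurring "Disabling service 'block-storage' ... NoSuchOptError" warnings above come from openstacksdk reading config sections ([cinder], [barbican]) in which valid_interfaces was never registered as an option; the SDK reacts by disabling that service's adapter rather than failing. The underlying oslo.config behaviour can be reproduced in isolation (minimal sketch, not Nova code; the registered option is only there to make the group exist):

    # Minimal reproduction of the NoSuchOptError behind the warnings above:
    # reading an option that was never registered in a group raises it.
    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_opts([cfg.StrOpt('catalog_info')], group='cinder')
    conf([])   # parse an empty command line

    try:
        conf.cinder.valid_interfaces      # never registered under [cinder]
    except cfg.NoSuchOptError as e:
        print(e)  # prints something like: no such option valid_interfaces in group [cinder]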
[ 1459.357200] env[65503]: WARNING openstack [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1459.357547] env[65503]: WARNING openstack [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1459.439036] env[65503]: DEBUG nova.network.neutron [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Updating instance_info_cache with network_info: [{"id": "cae0dc26-fecc-453d-bc7e-f58104bfe623", "address": "fa:16:3e:7f:65:2b", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcae0dc26-fe", "ovs_interfaceid": "cae0dc26-fecc-453d-bc7e-f58104bfe623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1459.576723] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.154s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1459.576945] env[65503]: INFO nova.compute.manager [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Migrating [ 1459.941846] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "refresh_cache-21882f8d-89bd-4291-a33a-b18b6dfc5548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1459.942381] env[65503]: DEBUG 
nova.compute.manager [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Instance network_info: |[{"id": "cae0dc26-fecc-453d-bc7e-f58104bfe623", "address": "fa:16:3e:7f:65:2b", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcae0dc26-fe", "ovs_interfaceid": "cae0dc26-fecc-453d-bc7e-f58104bfe623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1459.942945] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:65:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cae0dc26-fecc-453d-bc7e-f58104bfe623', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1459.950669] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1459.950991] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1459.951251] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1fa6c19-3a55-459d-a7aa-5f0c26c16de7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.971535] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1459.971535] env[65503]: value = "task-4451105" [ 1459.971535] env[65503]: _type = "Task" [ 1459.971535] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.979977] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451105, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.093542] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.093771] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1460.093945] env[65503]: DEBUG nova.network.neutron [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1460.161014] env[65503]: DEBUG nova.compute.manager [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Received event network-changed-cae0dc26-fecc-453d-bc7e-f58104bfe623 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1460.161014] env[65503]: DEBUG nova.compute.manager [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Refreshing instance network info cache due to event network-changed-cae0dc26-fecc-453d-bc7e-f58104bfe623. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1460.161014] env[65503]: DEBUG oslo_concurrency.lockutils [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] Acquiring lock "refresh_cache-21882f8d-89bd-4291-a33a-b18b6dfc5548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.161285] env[65503]: DEBUG oslo_concurrency.lockutils [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] Acquired lock "refresh_cache-21882f8d-89bd-4291-a33a-b18b6dfc5548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1460.161783] env[65503]: DEBUG nova.network.neutron [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Refreshing network info cache for port cae0dc26-fecc-453d-bc7e-f58104bfe623 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1460.482462] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451105, 'name': CreateVM_Task, 'duration_secs': 0.318667} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.482809] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1460.483101] env[65503]: WARNING neutronclient.v2_0.client [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1460.483472] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.483623] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1460.483959] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1460.484230] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a79d8d5a-b88c-43a4-9790-282da22301e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.489128] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1460.489128] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52079f34-7319-b6b0-b0d4-c8c6680ba38c" [ 1460.489128] env[65503]: _type = "Task" [ 1460.489128] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.496824] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52079f34-7319-b6b0-b0d4-c8c6680ba38c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.596953] env[65503]: WARNING neutronclient.v2_0.client [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
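The "Acquiring lock / Acquired lock / Releasing lock" triplets around the "[datastore2] devstack-image-cache_base/..." path above are oslo.concurrency named locks serializing access to one image-cache entry while the datastore is searched for the cached VMDK. A minimal sketch of that pattern (the lock name is copied from the log; the body is illustrative only):

    # Sketch of the named-lock pattern behind the
    # 'Acquiring lock "[datastore2] devstack-image-cache_base/..."' messages.
    from oslo_concurrency import lockutils

    cache_entry = '[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93'

    with lockutils.lock(cache_entry):
        # Only one worker in this process inspects or populates the cached
        # image at a time, e.g. checking whether the VMDK already exists
        # before deciding to fetch it from Glance.
        pass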
[ 1460.597706] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1460.598098] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1460.664968] env[65503]: WARNING neutronclient.v2_0.client [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1460.665707] env[65503]: WARNING openstack [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1460.666099] env[65503]: WARNING openstack [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1460.717102] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1460.717513] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1460.784686] env[65503]: WARNING neutronclient.v2_0.client [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
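The repeated neutronclient deprecation warning points at its own replacement: the same port reads can be done through openstacksdk. A hedged sketch of the SDK equivalent for the port created earlier in this trace (the cloud name 'devstack' is an assumption and must exist in clouds.yaml):

    # Sketch of what the deprecation warning above recommends: reading the
    # same Neutron port through openstacksdk instead of python-neutronclient.
    import openstack

    conn = openstack.connect(cloud='devstack')   # cloud name assumed
    port = conn.network.get_port('cae0dc26-fecc-453d-bc7e-f58104bfe623')
    print(port.status, port.fixed_ips)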
[ 1460.785404] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1460.785775] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1460.862754] env[65503]: DEBUG nova.network.neutron [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance_info_cache with network_info: [{"id": "38fa3af1-f940-49c8-b96a-07349687620f", "address": "fa:16:3e:55:bb:66", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38fa3af1-f9", "ovs_interfaceid": "38fa3af1-f940-49c8-b96a-07349687620f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1461.001255] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52079f34-7319-b6b0-b0d4-c8c6680ba38c, 'name': SearchDatastore_Task, 'duration_secs': 0.010207} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.001892] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1461.002148] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1461.002389] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.002533] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1461.002705] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1461.003160] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4df7e24d-9e23-48ea-a94a-447ea439fab8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.012115] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1461.012291] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1461.013013] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-024caba3-f9d4-4871-ba98-9a3178034308 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.027532] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1461.027532] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f2b1d2-5032-892a-8dcc-a9969eaa488d" [ 1461.027532] env[65503]: _type = "Task" [ 1461.027532] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.037969] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f2b1d2-5032-892a-8dcc-a9969eaa488d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.189551] env[65503]: WARNING openstack [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1461.189944] env[65503]: WARNING openstack [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1461.251478] env[65503]: WARNING neutronclient.v2_0.client [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
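The MakeDirectory and SearchDatastore_Task calls above resolve the cached image under the path convention "[datastore] devstack-image-cache_base/<image-id>/<image-id>.vmdk" before deciding whether a copy is needed. A small helper that builds such a path, purely to show the naming convention (Nova uses its own DatastorePath abstraction in nova.virt.vmwareapi.ds_util):

    # Illustrative helper for the image-cache paths being searched above.
    def cached_image_vmdk(datastore, image_id,
                          cache_folder='devstack-image-cache_base'):
        return '[%s] %s/%s/%s.vmdk' % (datastore, cache_folder, image_id, image_id)

    print(cached_image_vmdk('datastore2', 'd68ffece-ab91-4610-b535-fa1fb25ade93'))
    # [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk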
[ 1461.251916] env[65503]: WARNING openstack [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1461.252385] env[65503]: WARNING openstack [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1461.330457] env[65503]: DEBUG nova.network.neutron [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Updated VIF entry in instance network info cache for port cae0dc26-fecc-453d-bc7e-f58104bfe623. {{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1461.330830] env[65503]: DEBUG nova.network.neutron [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Updating instance_info_cache with network_info: [{"id": "cae0dc26-fecc-453d-bc7e-f58104bfe623", "address": "fa:16:3e:7f:65:2b", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcae0dc26-fe", "ovs_interfaceid": "cae0dc26-fecc-453d-bc7e-f58104bfe623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1461.365807] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1461.539038] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52f2b1d2-5032-892a-8dcc-a9969eaa488d, 'name': SearchDatastore_Task, 'duration_secs': 0.013519} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.539788] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f22d0da5-81aa-4cce-84ca-19dbb16c0dae {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.545632] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1461.545632] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5272c95a-e6cf-f6aa-a3cc-13779b85e6a0" [ 1461.545632] env[65503]: _type = "Task" [ 1461.545632] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.553595] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5272c95a-e6cf-f6aa-a3cc-13779b85e6a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.834301] env[65503]: DEBUG oslo_concurrency.lockutils [req-090afc4e-fe6a-49bc-88dd-d85ef6639e47 req-0695ae58-a055-45ff-be22-93aeae378f3e service nova] Releasing lock "refresh_cache-21882f8d-89bd-4291-a33a-b18b6dfc5548" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1462.056378] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5272c95a-e6cf-f6aa-a3cc-13779b85e6a0, 'name': SearchDatastore_Task, 'duration_secs': 0.010141} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.056611] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1462.056826] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 21882f8d-89bd-4291-a33a-b18b6dfc5548/21882f8d-89bd-4291-a33a-b18b6dfc5548.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1462.057099] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2aa41a48-9710-441b-be88-eb5bf610988f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.064626] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1462.064626] env[65503]: value = "task-4451106" [ 1462.064626] env[65503]: _type = "Task" [ 1462.064626] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.072530] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451106, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.575199] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451106, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454017} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.575592] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 21882f8d-89bd-4291-a33a-b18b6dfc5548/21882f8d-89bd-4291-a33a-b18b6dfc5548.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1462.575697] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1462.575948] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca518563-4a00-4669-99ea-d9db0ee38a31 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.582727] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1462.582727] env[65503]: value = "task-4451107" [ 1462.582727] env[65503]: _type = "Task" [ 1462.582727] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.592087] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451107, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.881021] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f3cccb-6026-42a7-acbc-fd2d03667a6f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.899689] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance '3b7a6813-c1fa-4f51-ae17-cc147a8c809d' progress to 0 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1463.094647] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451107, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067376} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.095080] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1463.095877] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180808ac-9294-4b01-bc82-8720f6742ea5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.119085] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 21882f8d-89bd-4291-a33a-b18b6dfc5548/21882f8d-89bd-4291-a33a-b18b6dfc5548.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1463.119406] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e240a865-b2fc-4aef-b005-ad8a5b4caf91 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.139905] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1463.139905] env[65503]: value = "task-4451108" [ 1463.139905] env[65503]: _type = "Task" [ 1463.139905] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.148680] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451108, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.405638] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1463.405963] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66f436ed-375e-478a-8df4-f1848858b9e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.414304] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1463.414304] env[65503]: value = "task-4451109" [ 1463.414304] env[65503]: _type = "Task" [ 1463.414304] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.423150] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451109, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.651722] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451108, 'name': ReconfigVM_Task, 'duration_secs': 0.301085} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.652174] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 21882f8d-89bd-4291-a33a-b18b6dfc5548/21882f8d-89bd-4291-a33a-b18b6dfc5548.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1463.652664] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b087805-829d-4f54-8c99-ba0cda4d60e8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.660072] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1463.660072] env[65503]: value = "task-4451110" [ 1463.660072] env[65503]: _type = "Task" [ 1463.660072] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.669284] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451110, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.924371] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451109, 'name': PowerOffVM_Task, 'duration_secs': 0.209693} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.924684] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1463.924809] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance '3b7a6813-c1fa-4f51-ae17-cc147a8c809d' progress to 17 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1464.170558] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451110, 'name': Rename_Task, 'duration_secs': 0.142812} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.170882] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1464.171185] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7e93368-14b3-47f8-9ca8-dd4e340d1202 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.178681] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1464.178681] env[65503]: value = "task-4451111" [ 1464.178681] env[65503]: _type = "Task" [ 1464.178681] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.186959] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451111, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.431246] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1464.431512] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1464.431668] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1464.431845] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1464.431984] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1464.432184] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1464.432353] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1464.432507] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1464.432666] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Got 1 possible topologies 
{{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1464.432821] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1464.432988] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1464.437982] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dbb025f-e9a2-4530-b675-57c5458bc302 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.455212] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1464.455212] env[65503]: value = "task-4451112" [ 1464.455212] env[65503]: _type = "Task" [ 1464.455212] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.464266] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451112, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.688959] env[65503]: DEBUG oslo_vmware.api [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451111, 'name': PowerOnVM_Task, 'duration_secs': 0.509118} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.689386] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1464.689486] env[65503]: INFO nova.compute.manager [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Took 6.79 seconds to spawn the instance on the hypervisor. 
[ 1464.689632] env[65503]: DEBUG nova.compute.manager [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1464.690470] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72ede30-bb6f-440d-8f27-2eb8b5fcf781 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.966197] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451112, 'name': ReconfigVM_Task, 'duration_secs': 0.22171} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.966507] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance '3b7a6813-c1fa-4f51-ae17-cc147a8c809d' progress to 33 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1465.207944] env[65503]: INFO nova.compute.manager [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Took 11.50 seconds to build instance. [ 1465.472361] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1465.472624] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1465.472763] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1465.472936] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1465.473085] env[65503]: DEBUG nova.virt.hardware 
[None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1465.473227] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1465.473452] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1465.473567] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1465.473719] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1465.473866] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1465.474034] env[65503]: DEBUG nova.virt.hardware [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1465.479272] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1465.479564] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42dd4d68-846e-4f07-8396-62a034881dde {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.499529] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1465.499529] env[65503]: value = "task-4451113" [ 1465.499529] env[65503]: _type = "Task" [ 1465.499529] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.508204] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451113, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.709450] env[65503]: DEBUG oslo_concurrency.lockutils [None req-88e5e170-cc42-478a-82be-dfe3437838f0 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "21882f8d-89bd-4291-a33a-b18b6dfc5548" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.010s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1465.868384] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "21882f8d-89bd-4291-a33a-b18b6dfc5548" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1465.868656] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "21882f8d-89bd-4291-a33a-b18b6dfc5548" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1465.868886] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "21882f8d-89bd-4291-a33a-b18b6dfc5548-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1465.869163] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "21882f8d-89bd-4291-a33a-b18b6dfc5548-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1465.869347] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "21882f8d-89bd-4291-a33a-b18b6dfc5548-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1465.871739] env[65503]: INFO nova.compute.manager [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Terminating instance [ 1466.010488] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 
tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451113, 'name': ReconfigVM_Task, 'duration_secs': 0.176544} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.010719] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1466.011527] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca917255-595c-423d-a43e-a730a6efbad7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.034885] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 3b7a6813-c1fa-4f51-ae17-cc147a8c809d/3b7a6813-c1fa-4f51-ae17-cc147a8c809d.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1466.035192] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0008c6d-c55a-4ac0-97c0-19b3b1c4a8e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.053620] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1466.053620] env[65503]: value = "task-4451114" [ 1466.053620] env[65503]: _type = "Task" [ 1466.053620] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.062760] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451114, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.375573] env[65503]: DEBUG nova.compute.manager [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1466.375827] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1466.376820] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef20f84-72dd-4215-8e60-49a9b294aee3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.384789] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1466.385049] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbd09ed0-2096-48e0-b579-023118627073 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.391962] env[65503]: DEBUG oslo_vmware.api [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1466.391962] env[65503]: value = "task-4451115" [ 1466.391962] env[65503]: _type = "Task" [ 1466.391962] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.401095] env[65503]: DEBUG oslo_vmware.api [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451115, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.565438] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451114, 'name': ReconfigVM_Task, 'duration_secs': 0.25229} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.565576] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 3b7a6813-c1fa-4f51-ae17-cc147a8c809d/3b7a6813-c1fa-4f51-ae17-cc147a8c809d.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1466.565862] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance '3b7a6813-c1fa-4f51-ae17-cc147a8c809d' progress to 50 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1466.904120] env[65503]: DEBUG oslo_vmware.api [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451115, 'name': PowerOffVM_Task, 'duration_secs': 0.184259} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.904469] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1466.904573] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1466.904775] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-def40487-4ed0-4eee-a8ef-e9abd14b1473 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.968396] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1466.968623] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1466.968778] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleting the datastore file [datastore2] 21882f8d-89bd-4291-a33a-b18b6dfc5548 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1466.969119] env[65503]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-835d7192-361b-4653-a0cf-4b5da93a095d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.975853] env[65503]: DEBUG oslo_vmware.api [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1466.975853] env[65503]: value = "task-4451117" [ 1466.975853] env[65503]: _type = "Task" [ 1466.975853] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.983854] env[65503]: DEBUG oslo_vmware.api [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451117, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.072730] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e73a72f-4251-4514-83ba-01e7b07c9c18 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.092467] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2954a5b6-c51f-485d-9156-0696f724bfc0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.110996] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance '3b7a6813-c1fa-4f51-ae17-cc147a8c809d' progress to 67 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1467.485709] env[65503]: DEBUG oslo_vmware.api [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451117, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150926} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.485956] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1467.486174] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1467.486382] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1467.486576] env[65503]: INFO nova.compute.manager [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1467.486811] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1467.486997] env[65503]: DEBUG nova.compute.manager [-] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1467.487104] env[65503]: DEBUG nova.network.neutron [-] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1467.487389] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1467.487912] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1467.488179] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1467.546396] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1467.617407] env[65503]: WARNING neutronclient.v2_0.client [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1467.665070] env[65503]: DEBUG nova.network.neutron [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Port 38fa3af1-f940-49c8-b96a-07349687620f binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 1467.854876] env[65503]: DEBUG nova.compute.manager [req-e0677d97-89c9-4d75-a86c-3fbb171942d5 req-dd42b591-953b-4c38-beaa-f85c055f9cdf service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Received event network-vif-deleted-cae0dc26-fecc-453d-bc7e-f58104bfe623 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1467.855487] env[65503]: INFO nova.compute.manager [req-e0677d97-89c9-4d75-a86c-3fbb171942d5 req-dd42b591-953b-4c38-beaa-f85c055f9cdf service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Neutron deleted interface cae0dc26-fecc-453d-bc7e-f58104bfe623; detaching it from the instance and deleting it from the info cache [ 1467.855679] env[65503]: DEBUG nova.network.neutron [req-e0677d97-89c9-4d75-a86c-3fbb171942d5 req-dd42b591-953b-4c38-beaa-f85c055f9cdf service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1468.333075] env[65503]: DEBUG nova.network.neutron [-] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1468.358658] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c174b252-b422-4197-9cbe-a88d6e994f24 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.368409] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81202af2-63cd-4656-aa61-8f8ec0877f94 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.396989] env[65503]: DEBUG nova.compute.manager [req-e0677d97-89c9-4d75-a86c-3fbb171942d5 req-dd42b591-953b-4c38-beaa-f85c055f9cdf service nova] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Detach interface failed, port_id=cae0dc26-fecc-453d-bc7e-f58104bfe623, reason: Instance 21882f8d-89bd-4291-a33a-b18b6dfc5548 could not be found. 
{{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1468.687802] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1468.688047] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1468.688168] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1468.835503] env[65503]: INFO nova.compute.manager [-] [instance: 21882f8d-89bd-4291-a33a-b18b6dfc5548] Took 1.35 seconds to deallocate network for instance. [ 1469.342236] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1469.342574] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1469.342771] env[65503]: DEBUG nova.objects.instance [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lazy-loading 'resources' on Instance uuid 21882f8d-89bd-4291-a33a-b18b6dfc5548 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1469.691447] env[65503]: WARNING neutronclient.v2_0.client [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1469.756585] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.756768] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1469.756940] env[65503]: DEBUG nova.network.neutron [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1469.907345] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7deab4ae-78c0-4d59-b4dc-486fe7820f4a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.915501] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65773c86-55c9-4b0d-b098-111c7493dff7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.946339] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8dd3f3-4583-4bf0-a878-40b239800e2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.953938] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f735106e-4ef1-4bec-8640-0ae193da5f8d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.967686] env[65503]: DEBUG nova.compute.provider_tree [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.259936] env[65503]: WARNING neutronclient.v2_0.client [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1470.260708] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1470.261072] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1470.374548] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1470.374954] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1470.436821] env[65503]: WARNING neutronclient.v2_0.client [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1470.437571] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1470.437905] env[65503]: WARNING openstack [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1470.471624] env[65503]: DEBUG nova.scheduler.client.report [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1470.521358] env[65503]: DEBUG nova.network.neutron [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance_info_cache with network_info: [{"id": "38fa3af1-f940-49c8-b96a-07349687620f", "address": "fa:16:3e:55:bb:66", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38fa3af1-f9", "ovs_interfaceid": "38fa3af1-f940-49c8-b96a-07349687620f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1470.976189] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.633s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1470.995821] env[65503]: INFO nova.scheduler.client.report [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted allocations for instance 21882f8d-89bd-4291-a33a-b18b6dfc5548 [ 1471.023958] env[65503]: DEBUG oslo_concurrency.lockutils [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1471.504634] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c25ff096-b9a8-45cb-9f99-14f45223ee87 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "21882f8d-89bd-4291-a33a-b18b6dfc5548" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.636s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1471.546480] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe5dede-e7c1-47ec-8a25-ed8ccc98f1f7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.566245] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebc501e-5d66-4ec2-a0d1-69c4b9882657 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.573562] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance '3b7a6813-c1fa-4f51-ae17-cc147a8c809d' progress to 83 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1472.080186] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1472.080529] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c7938e9-353e-4f8a-bd1e-2c0b039099ff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.088235] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1472.088235] env[65503]: value = "task-4451118" [ 1472.088235] env[65503]: _type = "Task" [ 1472.088235] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.097863] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451118, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.598899] env[65503]: DEBUG oslo_vmware.api [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451118, 'name': PowerOnVM_Task, 'duration_secs': 0.369242} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.599334] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1472.599377] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-064757ea-d49c-4179-abe9-7880b0c0bcd8 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance '3b7a6813-c1fa-4f51-ae17-cc147a8c809d' progress to 100 {{(pid=65503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1472.889623] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "cc6a9305-bd7b-487d-98f3-f719e444e18c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1472.889832] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "cc6a9305-bd7b-487d-98f3-f719e444e18c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1473.391745] env[65503]: DEBUG nova.compute.manager [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1473.915074] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1473.915417] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1473.916637] env[65503]: INFO nova.compute.claims [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1474.988977] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbabab0f-cad7-4c6e-a991-4680fc72ebd4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.998634] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811e932b-a39c-47fc-84bd-2ef744a89a19 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.030249] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75d9bf1-0120-45bf-ac3f-320d7377b4ec {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.038206] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451de8c3-cbe5-4b5c-9600-db2d1d031b5b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.051882] env[65503]: DEBUG nova.compute.provider_tree [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.366435] env[65503]: WARNING neutronclient.v2_0.client [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1475.400596] env[65503]: WARNING neutronclient.v2_0.client [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1475.400969] env[65503]: WARNING neutronclient.v2_0.client [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1475.439364] env[65503]: DEBUG nova.network.neutron [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Port 38fa3af1-f940-49c8-b96a-07349687620f binding to destination host cpu-1 is already ACTIVE {{(pid=65503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3270}} [ 1475.439719] env[65503]: DEBUG oslo_concurrency.lockutils [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.439919] env[65503]: DEBUG oslo_concurrency.lockutils [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1475.440170] env[65503]: DEBUG nova.network.neutron [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1475.555510] env[65503]: DEBUG nova.scheduler.client.report [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1475.942918] env[65503]: WARNING neutronclient.v2_0.client [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
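The "Acquiring lock … / Lock … acquired by …" pairs above come from oslo.concurrency's lockutils, which Nova uses to serialize work such as the resource-tracker claim ("compute_resources") and the per-instance network-cache refresh ("refresh_cache-<uuid>"). A minimal sketch of that pattern, with illustrative lock bodies only:

    # Minimal sketch of the oslo.concurrency locking pattern seen in the log.
    # The critical-section bodies are illustrative, not Nova's real code.
    from oslo_concurrency import lockutils


    @lockutils.synchronized("compute_resources")
    def instance_claim_sketch(instance_uuid, vcpus, memory_mb):
        # Runs with the "compute_resources" lock held, mirroring
        # ResourceTracker.instance_claim in the entries above.
        print(f"claiming {vcpus} vCPU / {memory_mb} MB for {instance_uuid}")


    def refresh_network_cache_sketch(instance_uuid):
        # The cache refresh uses an ad-hoc lock name of the form
        # "refresh_cache-<instance uuid>", taken as a context manager.
        with lockutils.lock(f"refresh_cache-{instance_uuid}"):
            print(f"rebuilding network info cache for {instance_uuid}")
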
[ 1475.943638] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1475.943988] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1476.060385] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.145s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1476.060920] env[65503]: DEBUG nova.compute.manager [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1476.068827] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1476.069265] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1476.128478] env[65503]: WARNING neutronclient.v2_0.client [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
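The placement inventory reported above (VCPU total=48 with allocation_ratio=4.0, MEMORY_MB total=196590 with 512 reserved, DISK_GB total=200) bounds what the scheduler treats as usable capacity. A small sketch of the conventional effective-capacity arithmetic, assuming the standard (total - reserved) * allocation_ratio interpretation:

    # Sketch of the effective-capacity calculation implied by the inventory
    # record in the log; shown for illustration only.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        effective = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {effective:g} schedulable units")
        # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200
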
[ 1476.129200] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1476.129554] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1476.207693] env[65503]: DEBUG nova.network.neutron [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance_info_cache with network_info: [{"id": "38fa3af1-f940-49c8-b96a-07349687620f", "address": "fa:16:3e:55:bb:66", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38fa3af1-f9", "ovs_interfaceid": "38fa3af1-f940-49c8-b96a-07349687620f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1476.566078] env[65503]: DEBUG nova.compute.utils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1476.568856] env[65503]: DEBUG nova.compute.manager [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Allocating IP information in the background. 
{{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1476.569060] env[65503]: DEBUG nova.network.neutron [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1476.569456] env[65503]: WARNING neutronclient.v2_0.client [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1476.569751] env[65503]: WARNING neutronclient.v2_0.client [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1476.570341] env[65503]: WARNING openstack [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1476.570672] env[65503]: WARNING openstack [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1476.626028] env[65503]: DEBUG nova.policy [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3299e99bda746c89cd71759d037fd52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62e7660e16774c408729de84ba5c7534', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1476.710812] env[65503]: DEBUG oslo_concurrency.lockutils [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1476.898479] env[65503]: DEBUG nova.network.neutron [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Successfully created port: f566fe60-8ce2-4400-8738-f877c5dd3510 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1477.078570] env[65503]: DEBUG nova.compute.manager [None 
req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1477.214502] env[65503]: DEBUG nova.compute.manager [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=65503) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:925}} [ 1477.214785] env[65503]: DEBUG oslo_concurrency.lockutils [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1477.215053] env[65503]: DEBUG oslo_concurrency.lockutils [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1477.718882] env[65503]: DEBUG nova.objects.instance [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'migration_context' on Instance uuid 3b7a6813-c1fa-4f51-ae17-cc147a8c809d {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1478.088979] env[65503]: DEBUG nova.compute.manager [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1478.115273] env[65503]: DEBUG nova.virt.hardware [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1478.115522] env[65503]: DEBUG nova.virt.hardware [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1478.115672] env[65503]: DEBUG nova.virt.hardware [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1478.115845] env[65503]: DEBUG nova.virt.hardware [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1478.115980] env[65503]: DEBUG nova.virt.hardware [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1478.116139] env[65503]: DEBUG nova.virt.hardware [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1478.116341] env[65503]: DEBUG nova.virt.hardware [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1478.116491] env[65503]: DEBUG nova.virt.hardware [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1478.116644] env[65503]: DEBUG nova.virt.hardware [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 
tempest-ServersTestJSON-1867714447-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1478.116795] env[65503]: DEBUG nova.virt.hardware [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1478.116956] env[65503]: DEBUG nova.virt.hardware [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1478.117880] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118003bb-5de3-4a22-9110-c5c58b5228a8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.126372] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc4e62c-4425-4834-b977-3c093f882a0a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.291896] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e5a28d-3940-4623-b04c-37e4d949c59a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.299970] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9030d7c1-d246-432a-971a-f10c83b2ff1c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.332895] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a159856-ce5f-45f2-b66a-aa793d9b223a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.341238] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62525b61-00f4-4721-8323-a261b78bee96 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.355351] env[65503]: DEBUG nova.compute.provider_tree [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1478.496401] env[65503]: DEBUG nova.compute.manager [req-295a5843-5f1b-4846-8bee-d7b1f8c9457a req-347db0d4-62c3-46d0-96c4-4bc0ad884a55 service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Received event network-vif-plugged-f566fe60-8ce2-4400-8738-f877c5dd3510 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1478.496672] env[65503]: DEBUG oslo_concurrency.lockutils [req-295a5843-5f1b-4846-8bee-d7b1f8c9457a req-347db0d4-62c3-46d0-96c4-4bc0ad884a55 service nova] Acquiring lock "cc6a9305-bd7b-487d-98f3-f719e444e18c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1478.497348] env[65503]: DEBUG oslo_concurrency.lockutils [req-295a5843-5f1b-4846-8bee-d7b1f8c9457a req-347db0d4-62c3-46d0-96c4-4bc0ad884a55 service nova] Lock "cc6a9305-bd7b-487d-98f3-f719e444e18c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1478.497636] env[65503]: DEBUG oslo_concurrency.lockutils [req-295a5843-5f1b-4846-8bee-d7b1f8c9457a req-347db0d4-62c3-46d0-96c4-4bc0ad884a55 service nova] Lock "cc6a9305-bd7b-487d-98f3-f719e444e18c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1478.497820] env[65503]: DEBUG nova.compute.manager [req-295a5843-5f1b-4846-8bee-d7b1f8c9457a req-347db0d4-62c3-46d0-96c4-4bc0ad884a55 service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] No waiting events found dispatching network-vif-plugged-f566fe60-8ce2-4400-8738-f877c5dd3510 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1478.497975] env[65503]: WARNING nova.compute.manager [req-295a5843-5f1b-4846-8bee-d7b1f8c9457a req-347db0d4-62c3-46d0-96c4-4bc0ad884a55 service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Received unexpected event network-vif-plugged-f566fe60-8ce2-4400-8738-f877c5dd3510 for instance with vm_state building and task_state spawning. [ 1478.845909] env[65503]: DEBUG nova.network.neutron [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Successfully updated port: f566fe60-8ce2-4400-8738-f877c5dd3510 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1478.858894] env[65503]: DEBUG nova.scheduler.client.report [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1478.867112] env[65503]: DEBUG nova.compute.manager [req-a593e317-8cd1-4a5b-bb6e-062b9694a68d req-e6644ffc-b0f5-4e28-8d68-16c7228ca20e service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Received event network-changed-f566fe60-8ce2-4400-8738-f877c5dd3510 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1478.867112] env[65503]: DEBUG nova.compute.manager [req-a593e317-8cd1-4a5b-bb6e-062b9694a68d req-e6644ffc-b0f5-4e28-8d68-16c7228ca20e service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Refreshing instance network info cache due to event network-changed-f566fe60-8ce2-4400-8738-f877c5dd3510. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1478.867321] env[65503]: DEBUG oslo_concurrency.lockutils [req-a593e317-8cd1-4a5b-bb6e-062b9694a68d req-e6644ffc-b0f5-4e28-8d68-16c7228ca20e service nova] Acquiring lock "refresh_cache-cc6a9305-bd7b-487d-98f3-f719e444e18c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.867321] env[65503]: DEBUG oslo_concurrency.lockutils [req-a593e317-8cd1-4a5b-bb6e-062b9694a68d req-e6644ffc-b0f5-4e28-8d68-16c7228ca20e service nova] Acquired lock "refresh_cache-cc6a9305-bd7b-487d-98f3-f719e444e18c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1478.867627] env[65503]: DEBUG nova.network.neutron [req-a593e317-8cd1-4a5b-bb6e-062b9694a68d req-e6644ffc-b0f5-4e28-8d68-16c7228ca20e service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Refreshing network info cache for port f566fe60-8ce2-4400-8738-f877c5dd3510 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1479.348792] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "refresh_cache-cc6a9305-bd7b-487d-98f3-f719e444e18c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.369687] env[65503]: WARNING neutronclient.v2_0.client [req-a593e317-8cd1-4a5b-bb6e-062b9694a68d req-e6644ffc-b0f5-4e28-8d68-16c7228ca20e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1479.370385] env[65503]: WARNING openstack [req-a593e317-8cd1-4a5b-bb6e-062b9694a68d req-e6644ffc-b0f5-4e28-8d68-16c7228ca20e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1479.370734] env[65503]: WARNING openstack [req-a593e317-8cd1-4a5b-bb6e-062b9694a68d req-e6644ffc-b0f5-4e28-8d68-16c7228ca20e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1479.411405] env[65503]: DEBUG nova.network.neutron [req-a593e317-8cd1-4a5b-bb6e-062b9694a68d req-e6644ffc-b0f5-4e28-8d68-16c7228ca20e service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1479.489481] env[65503]: DEBUG nova.network.neutron [req-a593e317-8cd1-4a5b-bb6e-062b9694a68d req-e6644ffc-b0f5-4e28-8d68-16c7228ca20e service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1479.870598] env[65503]: DEBUG oslo_concurrency.lockutils [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.655s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1479.992586] env[65503]: DEBUG oslo_concurrency.lockutils [req-a593e317-8cd1-4a5b-bb6e-062b9694a68d req-e6644ffc-b0f5-4e28-8d68-16c7228ca20e service nova] Releasing lock "refresh_cache-cc6a9305-bd7b-487d-98f3-f719e444e18c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1479.993048] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "refresh_cache-cc6a9305-bd7b-487d-98f3-f719e444e18c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1479.993240] env[65503]: DEBUG nova.network.neutron [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1480.496306] env[65503]: WARNING openstack [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1480.496698] env[65503]: WARNING openstack [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1480.533012] env[65503]: DEBUG nova.network.neutron [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1480.552055] env[65503]: WARNING openstack [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1480.552471] env[65503]: WARNING openstack [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1480.613447] env[65503]: WARNING neutronclient.v2_0.client [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1480.614125] env[65503]: WARNING openstack [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1480.614509] env[65503]: WARNING openstack [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1480.697079] env[65503]: DEBUG nova.network.neutron [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Updating instance_info_cache with network_info: [{"id": "f566fe60-8ce2-4400-8738-f877c5dd3510", "address": "fa:16:3e:1e:54:51", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf566fe60-8c", "ovs_interfaceid": "f566fe60-8ce2-4400-8738-f877c5dd3510", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1481.200162] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "refresh_cache-cc6a9305-bd7b-487d-98f3-f719e444e18c" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1481.200588] env[65503]: DEBUG nova.compute.manager [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Instance network_info: |[{"id": "f566fe60-8ce2-4400-8738-f877c5dd3510", "address": "fa:16:3e:1e:54:51", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf566fe60-8c", "ovs_interfaceid": "f566fe60-8ce2-4400-8738-f877c5dd3510", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1481.201067] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:54:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f566fe60-8ce2-4400-8738-f877c5dd3510', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1481.208499] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1481.208720] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1481.208948] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f6b7153-1ade-4b3e-ac95-cdc19e596f8f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.229410] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1481.229410] env[65503]: value = "task-4451119" [ 1481.229410] env[65503]: _type = "Task" [ 1481.229410] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.238576] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451119, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.413780] env[65503]: INFO nova.compute.manager [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Swapping old allocation on dict_keys(['988ff85a-1d12-41bb-a369-e298e8491ca1']) held by migration f3dbbaaf-3ce5-4f0a-82d2-606eb3b3257e for instance [ 1481.438481] env[65503]: DEBUG nova.scheduler.client.report [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Overwriting current allocation {'allocations': {'988ff85a-1d12-41bb-a369-e298e8491ca1': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 175}}, 'project_id': '93906c603f7a4b18a34fc4b42fb6d6c1', 'user_id': 'af1890ab617d443e985db57a798cac5e', 'consumer_generation': 1} on consumer 3b7a6813-c1fa-4f51-ae17-cc147a8c809d {{(pid=65503) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1481.491522] env[65503]: WARNING neutronclient.v2_0.client [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
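The "Waiting for the task … / Task: {…} progress is 0% … completed successfully" sequences are oslo.vmware's wait_for_task/_poll_task loop: the session polls the vCenter task until it reaches a terminal state. A simplified stand-alone sketch of that loop; fetch_task_state is a hypothetical stand-in for the real property-collector query:

    import time


    def wait_for_task_sketch(task_id, fetch_task_state, interval=0.5):
        """Poll a vCenter-style task until it finishes.

        Simplified sketch of the wait_for_task/_poll_task behaviour in the
        log. fetch_task_state is a hypothetical callable returning a dict
        like {"state": "running" | "success" | "error", "progress": int}.
        """
        while True:
            info = fetch_task_state(task_id)
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError(f"task {task_id} failed")
            # Matches the periodic "progress is N%" DEBUG lines above.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(interval)
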
[ 1481.535013] env[65503]: DEBUG oslo_concurrency.lockutils [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.535274] env[65503]: DEBUG oslo_concurrency.lockutils [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1481.535402] env[65503]: DEBUG nova.network.neutron [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1481.739832] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451119, 'name': CreateVM_Task, 'duration_secs': 0.31543} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.740016] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1481.740555] env[65503]: WARNING neutronclient.v2_0.client [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
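The network_info blobs recorded whenever the instance cache is updated are ordinary lists of VIF dictionaries, so the addresses used elsewhere in the log can be pulled out with plain dictionary access. An illustrative helper over that structure; only the field names visible in the cache entries above are assumed:

    def summarize_vif(vif):
        """Return (mac, fixed_ips, floating_ips) from one cached VIF entry.

        Works on the network_info structure logged above: each VIF carries
        a "network" with "subnets", each subnet lists "ips", and every
        fixed IP may carry nested "floating_ips".
        """
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
        return vif["address"], fixed, floating


    # Against the entry for port f566fe60-8ce2-4400-8738-f877c5dd3510 this
    # yields ('fa:16:3e:1e:54:51', ['192.168.128.14'], []).
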
[ 1481.740916] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.741078] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1481.741394] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1481.741649] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57b2cae8-cd30-4e97-a342-ce5520b9d557 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.746476] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1481.746476] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]527eeaab-b0ef-587e-d2ac-53f48b96f921" [ 1481.746476] env[65503]: _type = "Task" [ 1481.746476] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.754795] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527eeaab-b0ef-587e-d2ac-53f48b96f921, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.038234] env[65503]: WARNING neutronclient.v2_0.client [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
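The lock names and copy source/target in the image-cache steps above follow the VMware datastore path convention "[<datastore>] <folder>/<file>", with one folder per Glance image id holding an "<id>.vmdk". A small sketch of how such a path is composed; the helper name is illustrative, not Nova's:

    def cached_image_vmdk_path(datastore, cache_folder, image_id):
        """Build a '[datastore] folder/image-id/image-id.vmdk' style path.

        Illustrative helper matching the paths in the log, e.g.
        '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk'.
        """
        return f"[{datastore}] {cache_folder}/{image_id}/{image_id}.vmdk"


    print(cached_image_vmdk_path(
        "datastore1", "devstack-image-cache_base",
        "d68ffece-ab91-4610-b535-fa1fb25ade93"))
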
[ 1482.038929] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1482.039336] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1482.162413] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1482.162807] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1482.224491] env[65503]: WARNING neutronclient.v2_0.client [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1482.225157] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1482.225501] env[65503]: WARNING openstack [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1482.259026] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]527eeaab-b0ef-587e-d2ac-53f48b96f921, 'name': SearchDatastore_Task, 'duration_secs': 0.010139} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.259403] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1482.259642] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1482.259871] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.260025] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1482.260308] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1482.260607] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1bd4c5f-75a2-419f-8ad1-82dc56b06707 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.272653] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1482.272833] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1482.273666] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b88226eb-244b-4652-ae71-43af8d8aed81 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.279514] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1482.279514] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5202dc88-d576-3820-0115-23c0b27c052c" [ 1482.279514] env[65503]: _type = "Task" [ 1482.279514] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.287740] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5202dc88-d576-3820-0115-23c0b27c052c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.308670] env[65503]: DEBUG nova.network.neutron [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance_info_cache with network_info: [{"id": "38fa3af1-f940-49c8-b96a-07349687620f", "address": "fa:16:3e:55:bb:66", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38fa3af1-f9", "ovs_interfaceid": "38fa3af1-f940-49c8-b96a-07349687620f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1482.790069] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5202dc88-d576-3820-0115-23c0b27c052c, 'name': SearchDatastore_Task, 'duration_secs': 0.009748} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.790911] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a38bddcc-c112-4f78-b011-da67637c5c8c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.796583] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1482.796583] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52050397-8477-95ac-cdad-774bd2589d61" [ 1482.796583] env[65503]: _type = "Task" [ 1482.796583] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.804851] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52050397-8477-95ac-cdad-774bd2589d61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.811464] env[65503]: DEBUG oslo_concurrency.lockutils [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-3b7a6813-c1fa-4f51-ae17-cc147a8c809d" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1482.811909] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1482.812166] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac3b81aa-7583-4ee6-abac-43b28c356fe0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.819399] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1482.819399] env[65503]: value = "task-4451120" [ 1482.819399] env[65503]: _type = "Task" [ 1482.819399] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.828920] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451120, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.307184] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52050397-8477-95ac-cdad-774bd2589d61, 'name': SearchDatastore_Task, 'duration_secs': 0.010073} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.307508] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1483.307803] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] cc6a9305-bd7b-487d-98f3-f719e444e18c/cc6a9305-bd7b-487d-98f3-f719e444e18c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1483.308124] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0207ce65-37ec-4424-b70e-124b728b8be4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.315040] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1483.315040] env[65503]: value = "task-4451121" [ 1483.315040] env[65503]: _type = "Task" [ 1483.315040] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.323183] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451121, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.330938] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451120, 'name': PowerOffVM_Task, 'duration_secs': 0.167691} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.331254] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1483.332012] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1483.332266] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1483.332451] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1483.332670] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1483.332840] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1483.333010] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1483.333247] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1483.333446] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 
tempest-ServerActionsTestJSON-130133197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1483.333629] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1483.333813] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1483.334033] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1483.339183] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-519bb346-a0dc-4c1f-85b0-c34009cf920b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.355455] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1483.355455] env[65503]: value = "task-4451122" [ 1483.355455] env[65503]: _type = "Task" [ 1483.355455] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.363966] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451122, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.826100] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451121, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465063} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.826369] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] cc6a9305-bd7b-487d-98f3-f719e444e18c/cc6a9305-bd7b-487d-98f3-f719e444e18c.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1483.826592] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1483.826894] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5898f783-d455-4310-8873-30e42bd42f6e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.835874] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1483.835874] env[65503]: value = "task-4451123" [ 1483.835874] env[65503]: _type = "Task" [ 1483.835874] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.844439] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451123, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.865468] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451122, 'name': ReconfigVM_Task, 'duration_secs': 0.13648} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.866291] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a41b135-efaf-477c-b973-c98a23ce75ba {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.885397] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1483.885635] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1483.885786] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1483.886099] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1483.886303] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1483.886457] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1483.886657] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1483.886854] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 1483.887039] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1483.887252] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1483.887445] env[65503]: DEBUG nova.virt.hardware [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1483.888240] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20fcc620-7263-44ee-97b0-c1dbb4aea77b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.894574] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1483.894574] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52358605-1cfb-323e-3116-208f0a12baed" [ 1483.894574] env[65503]: _type = "Task" [ 1483.894574] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.904016] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52358605-1cfb-323e-3116-208f0a12baed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.346306] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451123, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065192} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.346585] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1484.347431] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb19cc4-2274-4ea0-82ad-a49dafabdbfd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.369361] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] cc6a9305-bd7b-487d-98f3-f719e444e18c/cc6a9305-bd7b-487d-98f3-f719e444e18c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1484.369628] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6400411a-0b63-4ca8-be3d-5cce461d45d8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.389865] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1484.389865] env[65503]: value = "task-4451124" [ 1484.389865] env[65503]: _type = "Task" [ 1484.389865] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.398741] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451124, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.408188] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52358605-1cfb-323e-3116-208f0a12baed, 'name': SearchDatastore_Task, 'duration_secs': 0.016465} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.413391] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1484.413680] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c544060c-02e2-47df-ac98-5fbf5c2712fd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.431985] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1484.431985] env[65503]: value = "task-4451125" [ 1484.431985] env[65503]: _type = "Task" [ 1484.431985] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.440378] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451125, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.900734] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451124, 'name': ReconfigVM_Task, 'duration_secs': 0.285576} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.901221] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Reconfigured VM instance instance-0000007b to attach disk [datastore1] cc6a9305-bd7b-487d-98f3-f719e444e18c/cc6a9305-bd7b-487d-98f3-f719e444e18c.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1484.901749] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9bf34515-6bc2-4df0-9462-baea8602f92d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.908609] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1484.908609] env[65503]: value = "task-4451126" [ 1484.908609] env[65503]: _type = "Task" [ 1484.908609] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.917034] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451126, 'name': Rename_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.942871] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451125, 'name': ReconfigVM_Task, 'duration_secs': 0.182866} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.943455] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=65503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1484.944283] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2916e6a1-2f20-41e3-953f-bfb3843a5f1f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.968199] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 3b7a6813-c1fa-4f51-ae17-cc147a8c809d/3b7a6813-c1fa-4f51-ae17-cc147a8c809d.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1484.968712] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88578870-e8ae-4ad3-b3dd-b01ede5bcd73 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.991415] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1484.991415] env[65503]: value = "task-4451127" [ 1484.991415] env[65503]: _type = "Task" [ 1484.991415] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.002623] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451127, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.418694] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451126, 'name': Rename_Task, 'duration_secs': 0.174197} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.419014] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1485.419295] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17ed5259-ec18-4834-bdc2-b94c9c4c30f0 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.426197] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1485.426197] env[65503]: value = "task-4451128" [ 1485.426197] env[65503]: _type = "Task" [ 1485.426197] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.434501] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451128, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.500892] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451127, 'name': ReconfigVM_Task, 'duration_secs': 0.290262} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.501203] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 3b7a6813-c1fa-4f51-ae17-cc147a8c809d/3b7a6813-c1fa-4f51-ae17-cc147a8c809d.vmdk or device None with type thin {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1485.502098] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c9df93-207d-4237-93ad-372804adacf8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.520892] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e9fddd-bb3b-4a90-ae4c-6d75c5c0499f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.539116] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2051ed08-bdf2-458b-a1a1-18c0122835ac {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.558229] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861ac1ef-3aae-4801-a067-bf54c32a215b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.565611] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1485.565869] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88e425b5-3606-45cf-ac7d-25abdb73bb97 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.572620] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1485.572620] env[65503]: value = "task-4451129" [ 1485.572620] env[65503]: _type = "Task" [ 1485.572620] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.580865] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451129, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.942050] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451128, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.082580] env[65503]: DEBUG oslo_vmware.api [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451129, 'name': PowerOnVM_Task, 'duration_secs': 0.392303} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.083863] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1486.437993] env[65503]: DEBUG oslo_vmware.api [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451128, 'name': PowerOnVM_Task, 'duration_secs': 0.545944} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.438318] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1486.438476] env[65503]: INFO nova.compute.manager [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Took 8.35 seconds to spawn the instance on the hypervisor. [ 1486.438650] env[65503]: DEBUG nova.compute.manager [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1486.439500] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b10c6d5-749f-4118-b157-3706f5c11c0d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.959491] env[65503]: INFO nova.compute.manager [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Took 13.06 seconds to build instance. 
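Annotation: the spawn of instance cc6a9305-bd7b-487d-98f3-f719e444e18c above follows the oslo.vmware task pattern — each vCenter operation (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is invoked through the driver's API session and then polled by wait_for_task, which produces the "progress is N%" and "completed successfully" lines from oslo_vmware/api.py. The snippet below is only a minimal sketch of that invoke-and-poll pattern, not Nova's own vm_util code; the vCenter host, credentials and the VM managed-object reference are placeholders.

    # Sketch of the oslo.vmware invoke-and-poll pattern seen in the log above.
    # Host, credentials and vm_ref are placeholders, not values from this deployment.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.test',            # placeholder vCenter host
        'administrator@vsphere.local',     # placeholder user
        'secret',                          # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)            # polling interval used by wait_for_task

    # vm_ref: a VirtualMachine managed-object reference, obtained elsewhere
    # (e.g. via the PropertyCollector queries visible in the log).
    vm_ref = None  # placeholder

    # Start the vCenter task, then block while wait_for_task polls it,
    # logging progress until it reaches the 'success' state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)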
[ 1487.094863] env[65503]: INFO nova.compute.manager [None req-afbc642c-8d2c-482e-8051-0a9dd2d4fc55 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance to original state: 'active' [ 1487.462190] env[65503]: DEBUG oslo_concurrency.lockutils [None req-db278e03-0157-4218-9e31-a3741a06843d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "cc6a9305-bd7b-487d-98f3-f719e444e18c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.572s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1487.564131] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "e5c19364-d71b-4a55-983c-94ecb7b36893" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1487.564362] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "e5c19364-d71b-4a55-983c-94ecb7b36893" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1488.067135] env[65503]: DEBUG nova.compute.manager [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1488.594368] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1488.594731] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1488.596331] env[65503]: INFO nova.compute.claims [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1489.132305] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1489.132649] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1489.132753] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1489.132933] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1489.133106] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1489.135186] env[65503]: INFO 
nova.compute.manager [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Terminating instance [ 1489.638170] env[65503]: DEBUG nova.compute.manager [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1489.638391] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1489.639320] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd5d81a-5992-456f-9917-0ca8a22e8ee7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.649496] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1489.649836] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ec43aba-c6c8-4b5f-a0ef-8891aaefeaf5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.656853] env[65503]: DEBUG oslo_vmware.api [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1489.656853] env[65503]: value = "task-4451130" [ 1489.656853] env[65503]: _type = "Task" [ 1489.656853] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.665460] env[65503]: DEBUG oslo_vmware.api [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451130, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.667180] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ff59bc-1dd2-416b-a7e6-9be0698d03dc {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.674848] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1edabc-282f-4b5b-b9ab-01ccaa4b4258 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.705185] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7de537d-3294-419f-8618-4ecdc7396f49 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.713589] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16dd2b2-519b-4c96-b1c1-89f71ae5efff {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.729805] env[65503]: DEBUG nova.compute.provider_tree [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1490.167745] env[65503]: DEBUG oslo_vmware.api [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451130, 'name': PowerOffVM_Task, 'duration_secs': 0.199901} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.168055] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1490.168194] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1490.168454] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b7fa538-e11a-4f2a-b5d4-71d05b19e5dd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.232945] env[65503]: DEBUG nova.scheduler.client.report [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1490.236213] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1490.236467] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1490.236653] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleting the datastore file [datastore2] 3b7a6813-c1fa-4f51-ae17-cc147a8c809d {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1490.237180] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3cdea879-b186-4c01-89cc-37d5ac2bb110 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.245804] env[65503]: DEBUG oslo_vmware.api [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1490.245804] env[65503]: value = "task-4451132" [ 1490.245804] env[65503]: _type = "Task" [ 1490.245804] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.256109] env[65503]: DEBUG oslo_vmware.api [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451132, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.738939] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.144s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1490.739576] env[65503]: DEBUG nova.compute.manager [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1490.755448] env[65503]: DEBUG oslo_vmware.api [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451132, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151703} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.755701] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1490.755874] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1490.756049] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1490.756225] env[65503]: INFO nova.compute.manager [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1490.756456] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1490.756645] env[65503]: DEBUG nova.compute.manager [-] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1490.756739] env[65503]: DEBUG nova.network.neutron [-] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1490.756983] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1490.758034] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1490.758034] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1490.835556] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1491.244725] env[65503]: DEBUG nova.compute.utils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1491.246588] env[65503]: DEBUG nova.compute.manager [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1491.246787] env[65503]: DEBUG nova.network.neutron [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1491.247101] env[65503]: WARNING neutronclient.v2_0.client [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1491.247465] env[65503]: WARNING neutronclient.v2_0.client [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
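Annotation: the "Acquiring lock ... by ...", "Lock ... acquired ... :: waited Ns" and "Lock ... 'released' ... :: held Ns" DEBUG lines throughout this section are emitted by oslo.concurrency's lockutils wrapper (the inner/lock functions referenced at lockutils.py:405/410/424/334), which Nova uses to serialize access to shared state such as "compute_resources" and the per-instance locks. A minimal sketch of that locking pattern follows; the lock name and function are illustrative only, not Nova's actual resource-tracker code.

    # Sketch of the oslo.concurrency locking pattern behind the
    # acquire/release DEBUG lines in the log. Illustrative names only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs with the named lock held; the decorator's inner wrapper
        # logs the "Acquiring lock"/"acquired"/"released ... held" messages.
        print('claiming resources for %s' % instance_uuid)

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        pass  # critical section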
[ 1491.248055] env[65503]: WARNING openstack [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1491.248395] env[65503]: WARNING openstack [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1491.318693] env[65503]: DEBUG nova.policy [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3299e99bda746c89cd71759d037fd52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62e7660e16774c408729de84ba5c7534', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1491.347442] env[65503]: DEBUG nova.compute.manager [req-5f4f4aed-07ee-46f4-9d01-554c7f8644c9 req-348040b6-fede-4f11-8a35-54024c8b6b8d service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Received event network-vif-deleted-38fa3af1-f940-49c8-b96a-07349687620f {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1491.347711] env[65503]: INFO nova.compute.manager [req-5f4f4aed-07ee-46f4-9d01-554c7f8644c9 req-348040b6-fede-4f11-8a35-54024c8b6b8d service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Neutron deleted interface 38fa3af1-f940-49c8-b96a-07349687620f; detaching it from the instance and deleting it from the info cache [ 1491.347902] env[65503]: DEBUG nova.network.neutron [req-5f4f4aed-07ee-46f4-9d01-554c7f8644c9 req-348040b6-fede-4f11-8a35-54024c8b6b8d service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1491.635309] env[65503]: DEBUG nova.network.neutron [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Successfully created port: 75add843-5b3a-4a5d-96b7-7d168ef23e39 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1491.756522] env[65503]: DEBUG nova.compute.manager [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1491.827538] env[65503]: DEBUG nova.network.neutron [-] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1491.850765] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4780c1d8-c037-4b32-9ee6-d0ca07c746b4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.861603] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecd2260-261c-486b-8c97-fda55b61c068 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.892825] env[65503]: DEBUG nova.compute.manager [req-5f4f4aed-07ee-46f4-9d01-554c7f8644c9 req-348040b6-fede-4f11-8a35-54024c8b6b8d service nova] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Detach interface failed, port_id=38fa3af1-f940-49c8-b96a-07349687620f, reason: Instance 3b7a6813-c1fa-4f51-ae17-cc147a8c809d could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1492.330383] env[65503]: INFO nova.compute.manager [-] [instance: 3b7a6813-c1fa-4f51-ae17-cc147a8c809d] Took 1.57 seconds to deallocate network for instance. [ 1492.767098] env[65503]: DEBUG nova.compute.manager [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1492.800835] env[65503]: DEBUG nova.virt.hardware [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1492.801182] env[65503]: DEBUG nova.virt.hardware [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1492.801342] env[65503]: DEBUG nova.virt.hardware [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1492.801553] env[65503]: DEBUG nova.virt.hardware [None 
req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1492.801678] env[65503]: DEBUG nova.virt.hardware [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1492.801814] env[65503]: DEBUG nova.virt.hardware [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1492.802025] env[65503]: DEBUG nova.virt.hardware [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1492.802176] env[65503]: DEBUG nova.virt.hardware [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1492.802340] env[65503]: DEBUG nova.virt.hardware [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1492.802496] env[65503]: DEBUG nova.virt.hardware [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1492.802659] env[65503]: DEBUG nova.virt.hardware [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1492.803567] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c73997-070b-4e44-b7cd-cd685d4932b8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.812176] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951ec607-f5c2-4f62-99b2-e3a3f675a0e5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.837480] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1492.837772] 
env[65503]: DEBUG oslo_concurrency.lockutils [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1492.837957] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1492.863799] env[65503]: INFO nova.scheduler.client.report [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleted allocations for instance 3b7a6813-c1fa-4f51-ae17-cc147a8c809d [ 1493.140899] env[65503]: DEBUG nova.network.neutron [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Successfully updated port: 75add843-5b3a-4a5d-96b7-7d168ef23e39 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1493.371510] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3180a3ff-d974-42c7-9cc5-511e5d2648aa tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "3b7a6813-c1fa-4f51-ae17-cc147a8c809d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.239s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1493.463943] env[65503]: DEBUG nova.compute.manager [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Received event network-vif-plugged-75add843-5b3a-4a5d-96b7-7d168ef23e39 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1493.464369] env[65503]: DEBUG oslo_concurrency.lockutils [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] Acquiring lock "e5c19364-d71b-4a55-983c-94ecb7b36893-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1493.464534] env[65503]: DEBUG oslo_concurrency.lockutils [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] Lock "e5c19364-d71b-4a55-983c-94ecb7b36893-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1493.465709] env[65503]: DEBUG oslo_concurrency.lockutils [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] Lock "e5c19364-d71b-4a55-983c-94ecb7b36893-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1493.465851] env[65503]: DEBUG 
nova.compute.manager [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] No waiting events found dispatching network-vif-plugged-75add843-5b3a-4a5d-96b7-7d168ef23e39 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1493.466259] env[65503]: WARNING nova.compute.manager [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Received unexpected event network-vif-plugged-75add843-5b3a-4a5d-96b7-7d168ef23e39 for instance with vm_state building and task_state spawning. [ 1493.466259] env[65503]: DEBUG nova.compute.manager [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Received event network-changed-75add843-5b3a-4a5d-96b7-7d168ef23e39 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1493.466421] env[65503]: DEBUG nova.compute.manager [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Refreshing instance network info cache due to event network-changed-75add843-5b3a-4a5d-96b7-7d168ef23e39. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1493.466553] env[65503]: DEBUG oslo_concurrency.lockutils [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] Acquiring lock "refresh_cache-e5c19364-d71b-4a55-983c-94ecb7b36893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.466626] env[65503]: DEBUG oslo_concurrency.lockutils [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] Acquired lock "refresh_cache-e5c19364-d71b-4a55-983c-94ecb7b36893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1493.466764] env[65503]: DEBUG nova.network.neutron [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Refreshing network info cache for port 75add843-5b3a-4a5d-96b7-7d168ef23e39 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1493.643750] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "refresh_cache-e5c19364-d71b-4a55-983c-94ecb7b36893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.970248] env[65503]: WARNING neutronclient.v2_0.client [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1493.971082] env[65503]: WARNING openstack [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1493.971458] env[65503]: WARNING openstack [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1494.020589] env[65503]: DEBUG nova.network.neutron [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1494.130807] env[65503]: DEBUG nova.network.neutron [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1494.136564] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "0f2aec6a-692b-4fee-b0db-42e61146c4db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1494.136800] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0f2aec6a-692b-4fee-b0db-42e61146c4db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1494.324376] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1494.324701] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1494.636802] env[65503]: DEBUG oslo_concurrency.lockutils [req-eda7433c-4250-448d-87d9-3aad697a3c95 req-f813bf52-ae8b-446c-a2c1-7500f829c22c service nova] Releasing lock "refresh_cache-e5c19364-d71b-4a55-983c-94ecb7b36893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1494.637295] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 
tempest-ServersTestJSON-1867714447-project-member] Acquired lock "refresh_cache-e5c19364-d71b-4a55-983c-94ecb7b36893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1494.637480] env[65503]: DEBUG nova.network.neutron [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1494.638902] env[65503]: DEBUG nova.compute.manager [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Starting instance... {{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1495.143103] env[65503]: WARNING openstack [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1495.143593] env[65503]: WARNING openstack [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1495.170025] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1495.170301] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1495.172309] env[65503]: INFO nova.compute.claims [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1495.183383] env[65503]: DEBUG nova.network.neutron [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1495.205220] env[65503]: WARNING openstack [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1495.205597] env[65503]: WARNING openstack [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1495.279136] env[65503]: WARNING neutronclient.v2_0.client [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1495.279976] env[65503]: WARNING openstack [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1495.280401] env[65503]: WARNING openstack [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1495.369399] env[65503]: DEBUG nova.network.neutron [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Updating instance_info_cache with network_info: [{"id": "75add843-5b3a-4a5d-96b7-7d168ef23e39", "address": "fa:16:3e:85:3f:eb", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75add843-5b", "ovs_interfaceid": "75add843-5b3a-4a5d-96b7-7d168ef23e39", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1495.872122] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "refresh_cache-e5c19364-d71b-4a55-983c-94ecb7b36893" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1495.872517] env[65503]: DEBUG nova.compute.manager [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Instance network_info: |[{"id": "75add843-5b3a-4a5d-96b7-7d168ef23e39", "address": "fa:16:3e:85:3f:eb", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75add843-5b", "ovs_interfaceid": "75add843-5b3a-4a5d-96b7-7d168ef23e39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1495.872981] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:3f:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75add843-5b3a-4a5d-96b7-7d168ef23e39', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1495.880666] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1495.880885] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1495.881129] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30d27288-9763-4d7c-a162-0b9c92e8cb46 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.901555] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1495.901555] env[65503]: value = "task-4451133" [ 1495.901555] env[65503]: _type = "Task" [ 1495.901555] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.913206] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451133, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.240751] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c00cb1b-0e26-48c4-8ae8-b7904199614e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.248973] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828d8c55-10c4-48f4-b16e-3cd4d150fa2b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.280953] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac07e2a-c9f6-44cb-9127-d61add3e09ab {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.288625] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110cab70-df33-41f6-b3a2-915df88db2b5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.302557] env[65503]: DEBUG nova.compute.provider_tree [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1496.324098] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1496.412242] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451133, 'name': CreateVM_Task, 'duration_secs': 0.282217} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.412428] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1496.412918] env[65503]: WARNING neutronclient.v2_0.client [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1496.413295] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.413444] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1496.413746] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1496.413996] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-374c3d14-d087-4634-81b8-03d05770f735 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.418673] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1496.418673] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52841fff-e15e-bcb9-a882-8752967336ba" [ 1496.418673] env[65503]: _type = "Task" [ 1496.418673] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.427038] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52841fff-e15e-bcb9-a882-8752967336ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.810650] env[65503]: DEBUG nova.scheduler.client.report [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1496.929426] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52841fff-e15e-bcb9-a882-8752967336ba, 'name': SearchDatastore_Task, 'duration_secs': 0.01007} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.929761] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1496.929904] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1496.930173] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.930314] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1496.930522] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1496.930788] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a88fbe5-0f16-4e22-ba71-809e7153bd1a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1496.939546] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1496.939713] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1496.940447] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6da4eba-fb01-40a0-bfb3-8618e1f8c855 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.945704] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1496.945704] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52297633-506d-2137-4d27-f936c87fd057" [ 1496.945704] env[65503]: _type = "Task" [ 1496.945704] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.953099] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52297633-506d-2137-4d27-f936c87fd057, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.316630] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.146s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1497.317140] env[65503]: DEBUG nova.compute.manager [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1497.456736] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52297633-506d-2137-4d27-f936c87fd057, 'name': SearchDatastore_Task, 'duration_secs': 0.009064} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.457596] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9c67b6e-742a-4c1c-8de6-2a1479a71aa4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.463124] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1497.463124] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cc1d29-c02a-4a8f-179e-0c9dc653d1a1" [ 1497.463124] env[65503]: _type = "Task" [ 1497.463124] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.471579] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cc1d29-c02a-4a8f-179e-0c9dc653d1a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.822584] env[65503]: DEBUG nova.compute.utils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1497.824164] env[65503]: DEBUG nova.compute.manager [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1497.824402] env[65503]: DEBUG nova.network.neutron [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1497.824724] env[65503]: WARNING neutronclient.v2_0.client [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1497.825027] env[65503]: WARNING neutronclient.v2_0.client [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1497.825602] env[65503]: WARNING openstack [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1497.825929] env[65503]: WARNING openstack [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1497.868708] env[65503]: DEBUG nova.policy [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af1890ab617d443e985db57a798cac5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93906c603f7a4b18a34fc4b42fb6d6c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1497.974267] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52cc1d29-c02a-4a8f-179e-0c9dc653d1a1, 'name': SearchDatastore_Task, 'duration_secs': 0.009499} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.974687] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1497.974787] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] e5c19364-d71b-4a55-983c-94ecb7b36893/e5c19364-d71b-4a55-983c-94ecb7b36893.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1497.975072] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73fdc63b-95ad-46a2-8d87-4606414a2251 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.983778] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1497.983778] env[65503]: value = "task-4451134" [ 1497.983778] env[65503]: _type = "Task" [ 1497.983778] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.993435] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451134, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.186314] env[65503]: DEBUG nova.network.neutron [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Successfully created port: 8c3efc5b-406f-42aa-8a85-06abcc142165 {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1498.324338] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1498.324539] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1498.333487] env[65503]: DEBUG nova.compute.manager [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1498.494411] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451134, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450441} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.494679] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] e5c19364-d71b-4a55-983c-94ecb7b36893/e5c19364-d71b-4a55-983c-94ecb7b36893.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1498.495300] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1498.495300] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d0d19d2-d370-4b3f-bed1-45dfd3e117d5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.504645] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1498.504645] env[65503]: value = "task-4451135" [ 1498.504645] env[65503]: _type = "Task" [ 1498.504645] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.514602] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451135, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.016027] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451135, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065811} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.016332] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1499.016958] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a0529c-9d2e-4519-a66f-2aea5460ce1e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.040417] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] e5c19364-d71b-4a55-983c-94ecb7b36893/e5c19364-d71b-4a55-983c-94ecb7b36893.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1499.040732] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37c4a23b-c4eb-4a58-88c3-85386924283c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.061019] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1499.061019] env[65503]: value = "task-4451136" [ 1499.061019] env[65503]: _type = "Task" [ 1499.061019] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.068962] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451136, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.319450] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1499.344103] env[65503]: DEBUG nova.compute.manager [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1499.369424] env[65503]: DEBUG nova.virt.hardware [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1499.369688] env[65503]: DEBUG nova.virt.hardware [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1499.369837] env[65503]: DEBUG nova.virt.hardware [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1499.370025] env[65503]: DEBUG nova.virt.hardware [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1499.370172] env[65503]: DEBUG nova.virt.hardware [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1499.370312] env[65503]: DEBUG nova.virt.hardware [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1499.370516] env[65503]: DEBUG nova.virt.hardware [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1499.370673] env[65503]: DEBUG nova.virt.hardware [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1499.370830] env[65503]: DEBUG nova.virt.hardware [None 
req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1499.370982] env[65503]: DEBUG nova.virt.hardware [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1499.371168] env[65503]: DEBUG nova.virt.hardware [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1499.372070] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86a3c14-b77c-4f38-aa79-6642ea6abacf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.380343] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da479c35-7823-4a2f-a5e4-3942d0da3dcd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.571729] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451136, 'name': ReconfigVM_Task, 'duration_secs': 0.28459} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.572024] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Reconfigured VM instance instance-0000007c to attach disk [datastore2] e5c19364-d71b-4a55-983c-94ecb7b36893/e5c19364-d71b-4a55-983c-94ecb7b36893.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1499.572677] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b40a31d8-757c-4caa-84c1-d08e6e8eed59 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.580061] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1499.580061] env[65503]: value = "task-4451137" [ 1499.580061] env[65503]: _type = "Task" [ 1499.580061] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.588932] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451137, 'name': Rename_Task} progress is 5%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.605369] env[65503]: DEBUG nova.compute.manager [req-f3a0f6ba-f3f3-4428-bbb0-0ac1ab518317 req-29cbcea2-c45c-466a-aca0-167985104220 service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Received event network-vif-plugged-8c3efc5b-406f-42aa-8a85-06abcc142165 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1499.605369] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3a0f6ba-f3f3-4428-bbb0-0ac1ab518317 req-29cbcea2-c45c-466a-aca0-167985104220 service nova] Acquiring lock "0f2aec6a-692b-4fee-b0db-42e61146c4db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1499.605546] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3a0f6ba-f3f3-4428-bbb0-0ac1ab518317 req-29cbcea2-c45c-466a-aca0-167985104220 service nova] Lock "0f2aec6a-692b-4fee-b0db-42e61146c4db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1499.605587] env[65503]: DEBUG oslo_concurrency.lockutils [req-f3a0f6ba-f3f3-4428-bbb0-0ac1ab518317 req-29cbcea2-c45c-466a-aca0-167985104220 service nova] Lock "0f2aec6a-692b-4fee-b0db-42e61146c4db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1499.605762] env[65503]: DEBUG nova.compute.manager [req-f3a0f6ba-f3f3-4428-bbb0-0ac1ab518317 req-29cbcea2-c45c-466a-aca0-167985104220 service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] No waiting events found dispatching network-vif-plugged-8c3efc5b-406f-42aa-8a85-06abcc142165 {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1499.605884] env[65503]: WARNING nova.compute.manager [req-f3a0f6ba-f3f3-4428-bbb0-0ac1ab518317 req-29cbcea2-c45c-466a-aca0-167985104220 service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Received unexpected event network-vif-plugged-8c3efc5b-406f-42aa-8a85-06abcc142165 for instance with vm_state building and task_state spawning. [ 1499.712331] env[65503]: DEBUG nova.network.neutron [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Successfully updated port: 8c3efc5b-406f-42aa-8a85-06abcc142165 {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1500.092729] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451137, 'name': Rename_Task, 'duration_secs': 0.19194} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.093072] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1500.093276] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a4d3d03-41b9-46d8-b3b3-20bb3e4f6998 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.099943] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1500.099943] env[65503]: value = "task-4451138" [ 1500.099943] env[65503]: _type = "Task" [ 1500.099943] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.108112] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451138, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.214722] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.214929] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1500.215081] env[65503]: DEBUG nova.network.neutron [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1500.323991] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1500.610511] env[65503]: DEBUG oslo_vmware.api [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451138, 'name': PowerOnVM_Task, 'duration_secs': 0.485569} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.610778] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1500.610988] env[65503]: INFO nova.compute.manager [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Took 7.84 seconds to spawn the instance on the hypervisor. [ 1500.611188] env[65503]: DEBUG nova.compute.manager [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1500.611970] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66362a8a-a8cf-4c1a-9e0e-193405e4f1bd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.718635] env[65503]: WARNING openstack [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1500.719065] env[65503]: WARNING openstack [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1500.757412] env[65503]: DEBUG nova.network.neutron [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1500.778100] env[65503]: WARNING openstack [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1500.778498] env[65503]: WARNING openstack [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1500.827839] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1500.828117] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1500.828289] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1500.828434] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1500.829362] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49df30b-5c08-456d-96a8-79846991e274 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.838248] env[65503]: WARNING neutronclient.v2_0.client [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
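The repeated "Acquiring lock ... acquired ... released" DEBUG triplets in this section (for example the "compute_resources" lock taken by the resource tracker just above) come from oslo.concurrency's lockutils wrapper, which logs how long a caller waited for and held a named lock. A minimal sketch of that pattern, not Nova's actual code:

```python
# Minimal sketch of the oslo.concurrency pattern behind the
# Acquiring/acquired/released DEBUG lines in this log; the lock name
# "compute_resources" matches the resource tracker entries above.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def audit_resources():
    # Runs with the named semaphore held; lockutils logs how long the
    # caller waited for the lock and how long it was held.
    return "audited"


# Equivalent context-manager form of the same named lock.
with lockutils.lock("compute_resources"):
    pass

print(audit_resources())
```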
[ 1500.838891] env[65503]: WARNING openstack [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1500.839699] env[65503]: WARNING openstack [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1500.847461] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14f4ba0-c7b0-4cbc-81fb-9e5c4d052a2a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.863330] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbf65ba-d507-4269-bdd5-2565b5dcf625 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.871613] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2b33b1-f786-48d2-a291-62850742a881 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.903194] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180301MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1500.903376] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1500.903586] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1500.953631] env[65503]: DEBUG nova.network.neutron [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Updating instance_info_cache with network_info: [{"id": "8c3efc5b-406f-42aa-8a85-06abcc142165", "address": "fa:16:3e:cd:22:05", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c3efc5b-40", "ovs_interfaceid": "8c3efc5b-406f-42aa-8a85-06abcc142165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1501.128435] env[65503]: INFO nova.compute.manager [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Took 12.56 seconds to build instance. [ 1501.456026] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1501.456464] env[65503]: DEBUG nova.compute.manager [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Instance network_info: |[{"id": "8c3efc5b-406f-42aa-8a85-06abcc142165", "address": "fa:16:3e:cd:22:05", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c3efc5b-40", "ovs_interfaceid": "8c3efc5b-406f-42aa-8a85-06abcc142165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1501.457029] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:22:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c3efc5b-406f-42aa-8a85-06abcc142165', 
'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1501.465269] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1501.465881] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1501.466169] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-403041fc-4e40-4fbf-8a72-15a772b58f2b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.486824] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1501.486824] env[65503]: value = "task-4451139" [ 1501.486824] env[65503]: _type = "Task" [ 1501.486824] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.495440] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451139, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.632431] env[65503]: DEBUG oslo_concurrency.lockutils [None req-bcdd31f5-08a1-4e54-9829-78060beb3992 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "e5c19364-d71b-4a55-983c-94ecb7b36893" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.068s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1501.634548] env[65503]: DEBUG nova.compute.manager [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Received event network-changed-8c3efc5b-406f-42aa-8a85-06abcc142165 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1501.634764] env[65503]: DEBUG nova.compute.manager [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Refreshing instance network info cache due to event network-changed-8c3efc5b-406f-42aa-8a85-06abcc142165. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1501.634955] env[65503]: DEBUG oslo_concurrency.lockutils [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] Acquiring lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.635103] env[65503]: DEBUG oslo_concurrency.lockutils [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] Acquired lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1501.635257] env[65503]: DEBUG nova.network.neutron [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Refreshing network info cache for port 8c3efc5b-406f-42aa-8a85-06abcc142165 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1501.935438] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 53e5de8f-1a2a-4b17-a3cd-43888dc70be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1501.935438] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance cc6a9305-bd7b-487d-98f3-f719e444e18c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1501.935633] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance e5c19364-d71b-4a55-983c-94ecb7b36893 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1501.935679] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Instance 0f2aec6a-692b-4fee-b0db-42e61146c4db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1501.935838] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1501.935981] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=100GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '4', 'num_vm_active': '3', 'num_task_None': '3', 'num_os_type_None': '4', 'num_proj_62e7660e16774c408729de84ba5c7534': '3', 'io_workload': '1', 'num_vm_building': '1', 'num_task_spawning': '1', 'num_proj_93906c603f7a4b18a34fc4b42fb6d6c1': '1'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1501.997596] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451139, 'name': CreateVM_Task} progress is 25%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.999078] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6af3f5-53c0-411f-baf0-38db3d400471 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.001617] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "e5c19364-d71b-4a55-983c-94ecb7b36893" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1502.001828] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "e5c19364-d71b-4a55-983c-94ecb7b36893" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1502.002022] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "e5c19364-d71b-4a55-983c-94ecb7b36893-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1502.002216] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "e5c19364-d71b-4a55-983c-94ecb7b36893-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1502.002373] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock 
"e5c19364-d71b-4a55-983c-94ecb7b36893-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1502.004357] env[65503]: INFO nova.compute.manager [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Terminating instance [ 1502.009162] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea394bc9-352d-4be9-8b6a-1eed4ac6bab4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.041438] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93de35c3-8f2e-43b4-85c9-66ca9a4be891 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.049626] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2774301-0e1a-4716-86af-a6f180ba5f90 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.063913] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1502.138224] env[65503]: WARNING neutronclient.v2_0.client [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1502.138844] env[65503]: WARNING openstack [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1502.139351] env[65503]: WARNING openstack [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1502.264043] env[65503]: WARNING openstack [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1502.264474] env[65503]: WARNING openstack [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1502.327077] env[65503]: WARNING neutronclient.v2_0.client [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1502.327922] env[65503]: WARNING openstack [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1502.328306] env[65503]: WARNING openstack [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1502.408728] env[65503]: DEBUG nova.network.neutron [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Updated VIF entry in instance network info cache for port 8c3efc5b-406f-42aa-8a85-06abcc142165. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1502.409106] env[65503]: DEBUG nova.network.neutron [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Updating instance_info_cache with network_info: [{"id": "8c3efc5b-406f-42aa-8a85-06abcc142165", "address": "fa:16:3e:cd:22:05", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c3efc5b-40", "ovs_interfaceid": "8c3efc5b-406f-42aa-8a85-06abcc142165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1502.498373] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451139, 'name': CreateVM_Task, 'duration_secs': 0.737649} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.498573] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1502.499032] env[65503]: WARNING neutronclient.v2_0.client [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
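The network_info structure logged above for port 8c3efc5b-406f-42aa-8a85-06abcc142165 is a list of VIF dicts, each carrying the port ID, MAC address, device name, and the subnets with their fixed IPs. A small, self-contained sketch of reading those fields, using a trimmed copy of the cached entry from this log:

```python
# Illustrative only: extracting the useful fields from the network_info
# entry cached above; the dict is a trimmed copy of the logged VIF.
network_info = [{
    "id": "8c3efc5b-406f-42aa-8a85-06abcc142165",
    "address": "fa:16:3e:cd:22:05",
    "devname": "tap8c3efc5b-40",
    "network": {
        "id": "5268092e-fde1-4ba7-847a-2657cb32ed3a",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4}],
        }],
    },
}]

for vif in network_info:
    # Collect only the fixed IPs across all subnets of this VIF.
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip["type"] == "fixed"
    ]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)
```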
[ 1502.499470] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.499570] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1502.499916] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1502.500186] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0c5ebba-6742-41ec-8529-0ddebfcdc698 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.505116] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1502.505116] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5216aa60-0f8f-9087-80aa-01cdb214322a" [ 1502.505116] env[65503]: _type = "Task" [ 1502.505116] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.508979] env[65503]: DEBUG nova.compute.manager [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1502.509222] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1502.509971] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d7252a-4043-467f-a61f-be00d8d3aec8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.515101] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5216aa60-0f8f-9087-80aa-01cdb214322a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.519676] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1502.519908] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67a7853e-c6ea-45e0-abd3-e832a616dc19 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.527017] env[65503]: DEBUG oslo_vmware.api [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1502.527017] env[65503]: value = "task-4451140" [ 1502.527017] env[65503]: _type = "Task" [ 1502.527017] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.535363] env[65503]: DEBUG oslo_vmware.api [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451140, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.567131] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1502.911638] env[65503]: DEBUG oslo_concurrency.lockutils [req-746ada9f-2f29-4e25-a650-3a0cb3fbdeaf req-e16908ae-45de-4cc6-a968-458a5be709a4 service nova] Releasing lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1503.016179] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5216aa60-0f8f-9087-80aa-01cdb214322a, 'name': SearchDatastore_Task, 'duration_secs': 0.010453} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.016477] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1503.016706] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1503.016936] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.017086] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1503.017297] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1503.017748] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d96643d-ec19-4ab6-9a89-28d8f89c2c17 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.027535] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1503.027729] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1503.031589] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4142e291-763e-462b-ada8-fe5de6cfe86d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.037266] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1503.037266] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52235715-35b3-6e2a-0833-a24d89d8aefe" [ 1503.037266] env[65503]: _type = "Task" [ 1503.037266] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.040424] env[65503]: DEBUG oslo_vmware.api [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451140, 'name': PowerOffVM_Task, 'duration_secs': 0.201559} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.043826] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1503.043997] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1503.044280] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a69d9a52-5330-43fd-ad37-23b48b552151 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.051499] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52235715-35b3-6e2a-0833-a24d89d8aefe, 'name': SearchDatastore_Task, 'duration_secs': 0.010154} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.052385] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e939c1b-05b9-4c92-9b4c-b4b09d495b8b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.058356] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1503.058356] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]523ea8eb-496b-1b2d-2a8a-974d1b4c3fc6" [ 1503.058356] env[65503]: _type = "Task" [ 1503.058356] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.067774] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523ea8eb-496b-1b2d-2a8a-974d1b4c3fc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.072548] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1503.072722] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.169s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1503.107063] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1503.107282] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1503.107497] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleting the datastore file [datastore2] e5c19364-d71b-4a55-983c-94ecb7b36893 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1503.107761] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc71bb81-ec1f-4e8a-9bca-ed0608147b91 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.114908] env[65503]: DEBUG oslo_vmware.api [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1503.114908] env[65503]: value = "task-4451142" [ 1503.114908] env[65503]: _type = "Task" [ 1503.114908] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.123439] env[65503]: DEBUG oslo_vmware.api [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451142, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.568976] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]523ea8eb-496b-1b2d-2a8a-974d1b4c3fc6, 'name': SearchDatastore_Task, 'duration_secs': 0.010095} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.569357] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1503.569651] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 0f2aec6a-692b-4fee-b0db-42e61146c4db/0f2aec6a-692b-4fee-b0db-42e61146c4db.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1503.569952] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61406a14-8886-414b-9802-84614df70706 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.578207] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1503.578207] env[65503]: value = "task-4451143" [ 1503.578207] env[65503]: _type = "Task" [ 1503.578207] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.587017] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451143, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.624999] env[65503]: DEBUG oslo_vmware.api [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451142, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132523} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.625271] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1503.625461] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1503.625636] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1503.625828] env[65503]: INFO nova.compute.manager [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1503.626074] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1503.626286] env[65503]: DEBUG nova.compute.manager [-] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1503.626379] env[65503]: DEBUG nova.network.neutron [-] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1503.626632] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1503.627175] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1503.627440] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1503.671730] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1503.970772] env[65503]: DEBUG nova.compute.manager [req-92855507-6ae3-45b1-823e-2de62777b5c5 req-81b4eef2-a98c-4e8d-90fa-4802bf95a356 service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Received event network-vif-deleted-75add843-5b3a-4a5d-96b7-7d168ef23e39 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1503.970916] env[65503]: INFO nova.compute.manager [req-92855507-6ae3-45b1-823e-2de62777b5c5 req-81b4eef2-a98c-4e8d-90fa-4802bf95a356 service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Neutron deleted interface 75add843-5b3a-4a5d-96b7-7d168ef23e39; detaching it from the instance and deleting it from the info cache [ 1503.971282] env[65503]: DEBUG nova.network.neutron [req-92855507-6ae3-45b1-823e-2de62777b5c5 req-81b4eef2-a98c-4e8d-90fa-4802bf95a356 service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1504.073118] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1504.088951] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451143, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468388} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.090074] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore1] 0f2aec6a-692b-4fee-b0db-42e61146c4db/0f2aec6a-692b-4fee-b0db-42e61146c4db.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1504.090290] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1504.090570] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aed3ddeb-f6ec-41e0-bcbd-4fb39a0f7118 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.098654] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1504.098654] env[65503]: value = "task-4451144" [ 1504.098654] env[65503]: _type = "Task" [ 1504.098654] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.107351] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451144, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.443757] env[65503]: DEBUG nova.network.neutron [-] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1504.475016] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f8c85808-5495-4b4a-a579-f6164e446d2e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.486625] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6376e76-fce5-43bf-acb5-bf8eb9f4adda {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.518318] env[65503]: DEBUG nova.compute.manager [req-92855507-6ae3-45b1-823e-2de62777b5c5 req-81b4eef2-a98c-4e8d-90fa-4802bf95a356 service nova] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Detach interface failed, port_id=75add843-5b3a-4a5d-96b7-7d168ef23e39, reason: Instance e5c19364-d71b-4a55-983c-94ecb7b36893 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1504.578780] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1504.608396] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451144, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065222} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.608645] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1504.609463] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b96774-242c-4727-8ef6-c427ce3e8d88 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.631966] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 0f2aec6a-692b-4fee-b0db-42e61146c4db/0f2aec6a-692b-4fee-b0db-42e61146c4db.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1504.632213] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-016310a3-f159-4479-b37e-23be71f764b3 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.653369] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1504.653369] env[65503]: value = "task-4451145" [ 1504.653369] env[65503]: _type = "Task" [ 1504.653369] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.663561] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451145, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.947727] env[65503]: INFO nova.compute.manager [-] [instance: e5c19364-d71b-4a55-983c-94ecb7b36893] Took 1.32 seconds to deallocate network for instance. [ 1505.163698] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451145, 'name': ReconfigVM_Task, 'duration_secs': 0.26296} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.163934] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 0f2aec6a-692b-4fee-b0db-42e61146c4db/0f2aec6a-692b-4fee-b0db-42e61146c4db.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1505.164629] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b356f31d-5fe7-4c44-a81f-d16844b13fe1 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.171593] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1505.171593] env[65503]: value = "task-4451146" [ 1505.171593] env[65503]: _type = "Task" [ 1505.171593] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.179636] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451146, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.324065] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1505.454503] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1505.454786] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1505.455093] env[65503]: DEBUG nova.objects.instance [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lazy-loading 'resources' on Instance uuid e5c19364-d71b-4a55-983c-94ecb7b36893 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1505.682873] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451146, 'name': Rename_Task, 'duration_secs': 0.142706} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.683267] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1505.683358] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa17308e-12d9-43cc-9470-3b1cf2200482 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.690940] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1505.690940] env[65503]: value = "task-4451147" [ 1505.690940] env[65503]: _type = "Task" [ 1505.690940] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.699154] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451147, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.040099] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16473056-bda4-491f-b332-f59fea4c4329 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.051801] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6c331e-c6d4-405b-9018-46b9ce3e9418 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.099552] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a149a0d-fdf1-4f45-919d-62ae635cac08 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.109767] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66e5101-d222-4474-8658-1484ada8864c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.124119] env[65503]: DEBUG nova.compute.provider_tree [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1506.201438] env[65503]: DEBUG oslo_vmware.api [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451147, 'name': PowerOnVM_Task, 'duration_secs': 0.450725} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.201635] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1506.201829] env[65503]: INFO nova.compute.manager [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Took 6.86 seconds to spawn the instance on the hypervisor. [ 1506.201994] env[65503]: DEBUG nova.compute.manager [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1506.202788] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49b4f71-6de1-43dd-97b2-94d0d73c4930 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.627257] env[65503]: DEBUG nova.scheduler.client.report [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1506.719482] env[65503]: INFO nova.compute.manager [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Took 11.57 seconds to build instance. 
[ 1507.134161] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.678s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1507.158385] env[65503]: INFO nova.scheduler.client.report [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted allocations for instance e5c19364-d71b-4a55-983c-94ecb7b36893 [ 1507.221659] env[65503]: DEBUG oslo_concurrency.lockutils [None req-11c0c53f-8dcf-47f5-b8de-553c7c3d78c0 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0f2aec6a-692b-4fee-b0db-42e61146c4db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.085s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1507.288943] env[65503]: DEBUG nova.compute.manager [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Received event network-changed-8c3efc5b-406f-42aa-8a85-06abcc142165 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1507.289159] env[65503]: DEBUG nova.compute.manager [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Refreshing instance network info cache due to event network-changed-8c3efc5b-406f-42aa-8a85-06abcc142165. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1507.289376] env[65503]: DEBUG oslo_concurrency.lockutils [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] Acquiring lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.289545] env[65503]: DEBUG oslo_concurrency.lockutils [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] Acquired lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1507.289744] env[65503]: DEBUG nova.network.neutron [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Refreshing network info cache for port 8c3efc5b-406f-42aa-8a85-06abcc142165 {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1507.671457] env[65503]: DEBUG oslo_concurrency.lockutils [None req-c4498181-3b4a-4e85-85e5-c7483c47e213 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "e5c19364-d71b-4a55-983c-94ecb7b36893" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.669s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1507.792673] env[65503]: WARNING neutronclient.v2_0.client [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1507.793623] env[65503]: WARNING openstack [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1507.793981] env[65503]: WARNING openstack [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1507.901912] env[65503]: WARNING openstack [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1507.902346] env[65503]: WARNING openstack [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1507.960642] env[65503]: WARNING neutronclient.v2_0.client [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1507.961334] env[65503]: WARNING openstack [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1507.961690] env[65503]: WARNING openstack [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1508.044786] env[65503]: DEBUG nova.network.neutron [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Updated VIF entry in instance network info cache for port 8c3efc5b-406f-42aa-8a85-06abcc142165. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1508.045048] env[65503]: DEBUG nova.network.neutron [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Updating instance_info_cache with network_info: [{"id": "8c3efc5b-406f-42aa-8a85-06abcc142165", "address": "fa:16:3e:cd:22:05", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c3efc5b-40", "ovs_interfaceid": "8c3efc5b-406f-42aa-8a85-06abcc142165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1508.547772] env[65503]: DEBUG oslo_concurrency.lockutils [req-dd232366-2681-41fe-8a72-dc9180423fa2 req-289c1ab5-0d57-47de-a4b8-6dede1b6b00f service nova] Releasing lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1508.645726] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "cc6a9305-bd7b-487d-98f3-f719e444e18c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1508.645994] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "cc6a9305-bd7b-487d-98f3-f719e444e18c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1508.646245] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "cc6a9305-bd7b-487d-98f3-f719e444e18c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1508.646421] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock 
"cc6a9305-bd7b-487d-98f3-f719e444e18c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1508.646595] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "cc6a9305-bd7b-487d-98f3-f719e444e18c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1508.648846] env[65503]: INFO nova.compute.manager [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Terminating instance [ 1509.503264] env[65503]: DEBUG nova.compute.manager [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1509.503773] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1509.504364] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0c0b63-ab51-45db-b094-3e4636f54168 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.513608] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1509.513843] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18719209-0a55-4b99-8610-7dc84744416a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.522353] env[65503]: DEBUG oslo_vmware.api [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1509.522353] env[65503]: value = "task-4451148" [ 1509.522353] env[65503]: _type = "Task" [ 1509.522353] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.531373] env[65503]: DEBUG oslo_vmware.api [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451148, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.032650] env[65503]: DEBUG oslo_vmware.api [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451148, 'name': PowerOffVM_Task, 'duration_secs': 0.210495} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.032921] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1510.033091] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1510.033353] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf30aa78-8deb-4c90-9ba3-516a7747ae95 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.094169] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1510.094479] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1510.094682] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleting the datastore file [datastore1] cc6a9305-bd7b-487d-98f3-f719e444e18c {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1510.094948] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27996268-e149-4225-a323-a318f6430f4f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.103718] env[65503]: DEBUG oslo_vmware.api [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1510.103718] env[65503]: value = "task-4451150" [ 1510.103718] env[65503]: _type = "Task" [ 1510.103718] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.112402] env[65503]: DEBUG oslo_vmware.api [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451150, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.614420] env[65503]: DEBUG oslo_vmware.api [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451150, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155982} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.614807] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1510.614879] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1510.614993] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1510.615182] env[65503]: INFO nova.compute.manager [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1510.615413] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1510.615600] env[65503]: DEBUG nova.compute.manager [-] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1510.615693] env[65503]: DEBUG nova.network.neutron [-] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1510.615927] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1510.616470] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1510.616738] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1510.652010] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1510.926093] env[65503]: DEBUG nova.compute.manager [req-962f7ffe-810c-4728-89c2-ffc36c1a1f1c req-db07856f-84b7-49f5-b1f3-21b5b39606f4 service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Received event network-vif-deleted-f566fe60-8ce2-4400-8738-f877c5dd3510 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1510.926262] env[65503]: INFO nova.compute.manager [req-962f7ffe-810c-4728-89c2-ffc36c1a1f1c req-db07856f-84b7-49f5-b1f3-21b5b39606f4 service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Neutron deleted interface f566fe60-8ce2-4400-8738-f877c5dd3510; detaching it from the instance and deleting it from the info cache [ 1510.926429] env[65503]: DEBUG nova.network.neutron [req-962f7ffe-810c-4728-89c2-ffc36c1a1f1c req-db07856f-84b7-49f5-b1f3-21b5b39606f4 service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1511.401104] env[65503]: DEBUG nova.network.neutron [-] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1511.430054] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-07147951-17c3-455f-9ef5-9c2a2c4c9e51 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.439950] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4244f2-21a6-4dd2-840a-17e2fa72fa70 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.467251] env[65503]: DEBUG nova.compute.manager [req-962f7ffe-810c-4728-89c2-ffc36c1a1f1c req-db07856f-84b7-49f5-b1f3-21b5b39606f4 service nova] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Detach interface failed, port_id=f566fe60-8ce2-4400-8738-f877c5dd3510, reason: Instance cc6a9305-bd7b-487d-98f3-f719e444e18c could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1511.904271] env[65503]: INFO nova.compute.manager [-] [instance: cc6a9305-bd7b-487d-98f3-f719e444e18c] Took 1.29 seconds to deallocate network for instance. 
[ 1512.412377] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1512.412782] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1512.413131] env[65503]: DEBUG nova.objects.instance [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lazy-loading 'resources' on Instance uuid cc6a9305-bd7b-487d-98f3-f719e444e18c {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1512.970679] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63087104-e482-4cf1-b2e3-77579ce5891d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.978768] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8499252e-6a31-4f74-a8a0-d6a45e27c7e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.008425] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe0fa41-b938-44d5-b4b3-3ec235398765 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.016419] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d94455b-92d7-40bc-b9ce-1b7f0060a47c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.030390] env[65503]: DEBUG nova.compute.provider_tree [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1513.533968] env[65503]: DEBUG nova.scheduler.client.report [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1514.039350] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.626s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1514.060438] env[65503]: INFO nova.scheduler.client.report [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted allocations for instance cc6a9305-bd7b-487d-98f3-f719e444e18c [ 1514.569212] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3f990c11-298d-42f0-ae11-ec548482ecd6 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "cc6a9305-bd7b-487d-98f3-f719e444e18c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.923s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1515.746050] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "05012e7b-e08f-4e35-9154-251558478659" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1515.746512] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "05012e7b-e08f-4e35-9154-251558478659" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1516.250237] env[65503]: DEBUG nova.compute.manager [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1516.776504] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1516.776788] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1516.778283] env[65503]: INFO nova.compute.claims [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1517.836937] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e609be-db2e-4bd1-8ef2-f8d4d29c284a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.845051] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce4f76f-8239-438d-9857-228a97cea305 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.875520] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aec2c51-d4c4-42b8-b5fe-2b2a59e83a30 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.883389] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34999ae-f09d-47cc-9dec-53052770163e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.896680] env[65503]: DEBUG nova.compute.provider_tree [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1518.400348] env[65503]: DEBUG nova.scheduler.client.report [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1518.905162] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 
tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.128s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1518.905727] env[65503]: DEBUG nova.compute.manager [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1519.410870] env[65503]: DEBUG nova.compute.utils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1519.412476] env[65503]: DEBUG nova.compute.manager [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1519.412667] env[65503]: DEBUG nova.network.neutron [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1519.413036] env[65503]: WARNING neutronclient.v2_0.client [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1519.413354] env[65503]: WARNING neutronclient.v2_0.client [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1519.413937] env[65503]: WARNING openstack [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1519.414289] env[65503]: WARNING openstack [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1519.480414] env[65503]: DEBUG nova.policy [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3299e99bda746c89cd71759d037fd52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62e7660e16774c408729de84ba5c7534', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1519.772934] env[65503]: DEBUG nova.network.neutron [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Successfully created port: a4356152-7462-46a7-b26d-051b9db1302c {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1519.922656] env[65503]: DEBUG nova.compute.manager [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Start building block device mappings for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1520.933288] env[65503]: DEBUG nova.compute.manager [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Start spawning the instance on the hypervisor. 
{{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1520.960964] env[65503]: DEBUG nova.virt.hardware [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1520.961374] env[65503]: DEBUG nova.virt.hardware [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1520.961647] env[65503]: DEBUG nova.virt.hardware [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1520.961928] env[65503]: DEBUG nova.virt.hardware [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1520.962105] env[65503]: DEBUG nova.virt.hardware [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1520.962254] env[65503]: DEBUG nova.virt.hardware [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1520.962457] env[65503]: DEBUG nova.virt.hardware [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1520.962612] env[65503]: DEBUG nova.virt.hardware [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1520.962775] env[65503]: DEBUG nova.virt.hardware [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 
tempest-ServersTestJSON-1867714447-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1520.962930] env[65503]: DEBUG nova.virt.hardware [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1520.963120] env[65503]: DEBUG nova.virt.hardware [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1520.964049] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f9c3b3-4ce6-4e42-8165-a031adaa590a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.973776] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be59602-82af-4949-acc8-2ff6a6c7871c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.168048] env[65503]: DEBUG nova.compute.manager [req-130aabee-908c-440a-ab6e-7b95616616bc req-fedb8895-ac92-4c94-b793-c399b4210496 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] Received event network-vif-plugged-a4356152-7462-46a7-b26d-051b9db1302c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1521.168533] env[65503]: DEBUG oslo_concurrency.lockutils [req-130aabee-908c-440a-ab6e-7b95616616bc req-fedb8895-ac92-4c94-b793-c399b4210496 service nova] Acquiring lock "05012e7b-e08f-4e35-9154-251558478659-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1521.168950] env[65503]: DEBUG oslo_concurrency.lockutils [req-130aabee-908c-440a-ab6e-7b95616616bc req-fedb8895-ac92-4c94-b793-c399b4210496 service nova] Lock "05012e7b-e08f-4e35-9154-251558478659-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1521.169279] env[65503]: DEBUG oslo_concurrency.lockutils [req-130aabee-908c-440a-ab6e-7b95616616bc req-fedb8895-ac92-4c94-b793-c399b4210496 service nova] Lock "05012e7b-e08f-4e35-9154-251558478659-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1521.169526] env[65503]: DEBUG nova.compute.manager [req-130aabee-908c-440a-ab6e-7b95616616bc req-fedb8895-ac92-4c94-b793-c399b4210496 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] No waiting events found dispatching network-vif-plugged-a4356152-7462-46a7-b26d-051b9db1302c {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1521.169688] env[65503]: WARNING nova.compute.manager [req-130aabee-908c-440a-ab6e-7b95616616bc req-fedb8895-ac92-4c94-b793-c399b4210496 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] 
Received unexpected event network-vif-plugged-a4356152-7462-46a7-b26d-051b9db1302c for instance with vm_state building and task_state spawning. [ 1521.251994] env[65503]: DEBUG nova.network.neutron [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Successfully updated port: a4356152-7462-46a7-b26d-051b9db1302c {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1521.755329] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "refresh_cache-05012e7b-e08f-4e35-9154-251558478659" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.755448] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "refresh_cache-05012e7b-e08f-4e35-9154-251558478659" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1521.755559] env[65503]: DEBUG nova.network.neutron [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1522.258314] env[65503]: WARNING openstack [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1522.258708] env[65503]: WARNING openstack [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1522.298599] env[65503]: DEBUG nova.network.neutron [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Instance cache missing network info. 
{{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1522.318963] env[65503]: WARNING openstack [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1522.319366] env[65503]: WARNING openstack [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1522.383679] env[65503]: WARNING neutronclient.v2_0.client [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1522.384398] env[65503]: WARNING openstack [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1522.384765] env[65503]: WARNING openstack [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1522.468258] env[65503]: DEBUG nova.network.neutron [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Updating instance_info_cache with network_info: [{"id": "a4356152-7462-46a7-b26d-051b9db1302c", "address": "fa:16:3e:35:7e:22", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4356152-74", "ovs_interfaceid": "a4356152-7462-46a7-b26d-051b9db1302c", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1522.970986] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "refresh_cache-05012e7b-e08f-4e35-9154-251558478659" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1522.971352] env[65503]: DEBUG nova.compute.manager [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Instance network_info: |[{"id": "a4356152-7462-46a7-b26d-051b9db1302c", "address": "fa:16:3e:35:7e:22", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4356152-74", "ovs_interfaceid": "a4356152-7462-46a7-b26d-051b9db1302c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1522.971861] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:7e:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4356152-7462-46a7-b26d-051b9db1302c', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1522.979424] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1522.979719] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05012e7b-e08f-4e35-9154-251558478659] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1522.979922] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f9c88f4-52b9-47d4-9cb7-498c8c9c1786 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.000746] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1523.000746] env[65503]: value = "task-4451151" [ 1523.000746] env[65503]: _type = "Task" [ 1523.000746] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.008428] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451151, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.197063] env[65503]: DEBUG nova.compute.manager [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] Received event network-changed-a4356152-7462-46a7-b26d-051b9db1302c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1523.197296] env[65503]: DEBUG nova.compute.manager [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] Refreshing instance network info cache due to event network-changed-a4356152-7462-46a7-b26d-051b9db1302c. {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1523.197511] env[65503]: DEBUG oslo_concurrency.lockutils [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] Acquiring lock "refresh_cache-05012e7b-e08f-4e35-9154-251558478659" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.197650] env[65503]: DEBUG oslo_concurrency.lockutils [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] Acquired lock "refresh_cache-05012e7b-e08f-4e35-9154-251558478659" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1523.197800] env[65503]: DEBUG nova.network.neutron [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] Refreshing network info cache for port a4356152-7462-46a7-b26d-051b9db1302c {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1523.510826] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451151, 'name': CreateVM_Task, 'duration_secs': 0.306288} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.511227] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05012e7b-e08f-4e35-9154-251558478659] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1523.511486] env[65503]: WARNING neutronclient.v2_0.client [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1523.511838] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.511997] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1523.512322] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1523.512579] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d0c1f17-19b7-422e-a8d7-9a17dc473369 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.517722] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1523.517722] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52660453-6d7a-c329-3806-8bf0252ace07" [ 1523.517722] env[65503]: _type = "Task" [ 1523.517722] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.525780] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52660453-6d7a-c329-3806-8bf0252ace07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.702736] env[65503]: WARNING neutronclient.v2_0.client [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
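The CreateVM_Task and SearchDatastore_Task entries above follow oslo.vmware's invoke-then-poll pattern: a vSphere *_Task method is invoked through the session, and the returned Task object is polled (the "progress is 0%" lines) until it succeeds or errors. A minimal sketch of that pattern follows, assuming an already-established oslo_vmware.api.VMwareAPISession and a VirtualMachine managed-object reference obtained elsewhere; it is illustrative, not the Nova driver code itself.

    # Minimal sketch of the oslo.vmware invoke-and-wait pattern behind the
    # "Invoking <X>_Task ..." / "Task: {...} progress is 0%" entries above.
    # `session` is assumed to be an established oslo_vmware.api.VMwareAPISession
    # and `vm_ref` a VirtualMachine managed-object reference; both come from
    # elsewhere and are not created here.

    def power_on_vm(session, vm_ref):
        """Invoke a vSphere *_Task method and block until it completes."""
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # wait_for_task() polls the Task managed object (the _poll_task lines
        # in this log) and raises if the task ends in an error state.
        return session.wait_for_task(task)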
[ 1523.703431] env[65503]: WARNING openstack [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1523.703780] env[65503]: WARNING openstack [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1523.793715] env[65503]: WARNING openstack [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1523.794110] env[65503]: WARNING openstack [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1523.849018] env[65503]: WARNING neutronclient.v2_0.client [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1523.849709] env[65503]: WARNING openstack [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1523.850090] env[65503]: WARNING openstack [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1523.928361] env[65503]: DEBUG nova.network.neutron [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] Updated VIF entry in instance network info cache for port a4356152-7462-46a7-b26d-051b9db1302c. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1523.928748] env[65503]: DEBUG nova.network.neutron [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] Updating instance_info_cache with network_info: [{"id": "a4356152-7462-46a7-b26d-051b9db1302c", "address": "fa:16:3e:35:7e:22", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4356152-74", "ovs_interfaceid": "a4356152-7462-46a7-b26d-051b9db1302c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1524.028557] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52660453-6d7a-c329-3806-8bf0252ace07, 'name': SearchDatastore_Task, 'duration_secs': 0.009914} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.028792] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1524.029023] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1524.029259] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.029400] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1524.029570] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1524.029866] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd477ab9-f723-4fd5-a3c9-cc89e2233481 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.039008] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1524.039202] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1524.039957] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eae92394-5a1f-46d2-97fe-c2ca211e3f3f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.045808] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1524.045808] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52008b39-dc7b-cc05-56df-9e81d60be75d" [ 1524.045808] env[65503]: _type = "Task" [ 1524.045808] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.053593] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52008b39-dc7b-cc05-56df-9e81d60be75d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.432148] env[65503]: DEBUG oslo_concurrency.lockutils [req-a2db3008-157f-4b07-9aaf-072280ab1867 req-09952312-2019-4e54-bb14-ceb70d78eeb8 service nova] Releasing lock "refresh_cache-05012e7b-e08f-4e35-9154-251558478659" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1524.557710] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52008b39-dc7b-cc05-56df-9e81d60be75d, 'name': SearchDatastore_Task, 'duration_secs': 0.009343} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.558396] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6479ae89-67f2-455b-a095-e3660c5f0f47 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.564218] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1524.564218] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5266b547-f913-19ba-6bcf-625c26d97b86" [ 1524.564218] env[65503]: _type = "Task" [ 1524.564218] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.574533] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5266b547-f913-19ba-6bcf-625c26d97b86, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.075569] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5266b547-f913-19ba-6bcf-625c26d97b86, 'name': SearchDatastore_Task, 'duration_secs': 0.009967} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.075798] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1525.076034] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 05012e7b-e08f-4e35-9154-251558478659/05012e7b-e08f-4e35-9154-251558478659.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1525.076310] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0683bf2b-04c1-4a1c-b7fe-9894a760abb8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.084791] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1525.084791] env[65503]: value = "task-4451152" [ 1525.084791] env[65503]: _type = "Task" [ 1525.084791] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.093210] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451152, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.595578] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451152, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441504} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.595967] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] 05012e7b-e08f-4e35-9154-251558478659/05012e7b-e08f-4e35-9154-251558478659.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1525.596115] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1525.596293] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-727827d3-af69-4a0f-a0d7-fdfcd46e0368 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.603606] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1525.603606] env[65503]: value = "task-4451153" [ 1525.603606] env[65503]: _type = "Task" [ 1525.603606] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.611904] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451153, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.113673] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451153, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063543} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.113961] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1526.114704] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7259df1-4a65-4fa7-aac2-dfe087994458 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.136703] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] 05012e7b-e08f-4e35-9154-251558478659/05012e7b-e08f-4e35-9154-251558478659.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1526.136925] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-240be054-9533-4768-b55a-16660c7af81b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.156962] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1526.156962] env[65503]: value = "task-4451154" [ 1526.156962] env[65503]: _type = "Task" [ 1526.156962] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.164784] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451154, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.667477] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451154, 'name': ReconfigVM_Task, 'duration_secs': 0.296668} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.667911] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Reconfigured VM instance instance-0000007e to attach disk [datastore2] 05012e7b-e08f-4e35-9154-251558478659/05012e7b-e08f-4e35-9154-251558478659.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1526.668495] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38ccf5a4-d71e-4694-a9aa-328c61d7c220 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.676045] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1526.676045] env[65503]: value = "task-4451155" [ 1526.676045] env[65503]: _type = "Task" [ 1526.676045] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.684526] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451155, 'name': Rename_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.186506] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451155, 'name': Rename_Task, 'duration_secs': 0.1414} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.186772] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1527.187030] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89634f65-2565-43e1-a2a8-39645c093b3c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.194191] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1527.194191] env[65503]: value = "task-4451156" [ 1527.194191] env[65503]: _type = "Task" [ 1527.194191] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.202094] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451156, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.705114] env[65503]: DEBUG oslo_vmware.api [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451156, 'name': PowerOnVM_Task, 'duration_secs': 0.447297} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.705114] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1527.705114] env[65503]: INFO nova.compute.manager [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Took 6.77 seconds to spawn the instance on the hypervisor. [ 1527.705114] env[65503]: DEBUG nova.compute.manager [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1527.705877] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47e92a7-36ac-4aeb-8146-98e3426bc1d6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.224620] env[65503]: INFO nova.compute.manager [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Took 11.47 seconds to build instance. 
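The Acquiring / "acquired ... waited" / "released ... held" lines throughout this trace come from oslo_concurrency.lockutils, which both the build and the terminate paths use to serialize work on a per-instance lock name. A rough sketch of that general lockutils usage follows; the lock name and wrapped function are illustrative only and not Nova's exact code.

    # Rough sketch of the oslo_concurrency.lockutils usage that produces the
    # Acquiring / "acquired ... waited" / "released ... held" DEBUG lines in
    # this trace. Lock name and function body are illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized("05012e7b-e08f-4e35-9154-251558478659")
    def do_build_or_terminate():
        # Runs with the per-instance lock held; a concurrent caller blocks
        # here and later logs how long it waited for the lock.
        pass

    do_build_or_terminate()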
[ 1528.726286] env[65503]: DEBUG oslo_concurrency.lockutils [None req-5cecad2d-4fea-4d70-ad38-e963147ae817 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "05012e7b-e08f-4e35-9154-251558478659" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.980s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1529.040504] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "05012e7b-e08f-4e35-9154-251558478659" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1529.040745] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "05012e7b-e08f-4e35-9154-251558478659" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1529.040970] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "05012e7b-e08f-4e35-9154-251558478659-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1529.041184] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "05012e7b-e08f-4e35-9154-251558478659-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1529.041347] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "05012e7b-e08f-4e35-9154-251558478659-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1529.044039] env[65503]: INFO nova.compute.manager [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Terminating instance [ 1529.548273] env[65503]: DEBUG nova.compute.manager [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1529.548503] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1529.549763] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed05533f-ec24-443f-8d06-3e60f49f859a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.557734] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1529.557977] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f29b337-7a8a-4e51-8092-c022bd24ec20 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.564776] env[65503]: DEBUG oslo_vmware.api [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1529.564776] env[65503]: value = "task-4451157" [ 1529.564776] env[65503]: _type = "Task" [ 1529.564776] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.573569] env[65503]: DEBUG oslo_vmware.api [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451157, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.074710] env[65503]: DEBUG oslo_vmware.api [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451157, 'name': PowerOffVM_Task, 'duration_secs': 0.198195} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.075156] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1530.075156] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1530.075420] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8127125-d065-4c2d-94f4-59ad85b16fc8 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.143342] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1530.143579] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1530.143821] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleting the datastore file [datastore2] 05012e7b-e08f-4e35-9154-251558478659 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1530.144192] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59c57367-07b7-421e-b908-4f6adf856b85 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.152245] env[65503]: DEBUG oslo_vmware.api [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1530.152245] env[65503]: value = "task-4451159" [ 1530.152245] env[65503]: _type = "Task" [ 1530.152245] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.160981] env[65503]: DEBUG oslo_vmware.api [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451159, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.662748] env[65503]: DEBUG oslo_vmware.api [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451159, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155448} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.662993] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1530.663190] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1530.663371] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1530.663541] env[65503]: INFO nova.compute.manager [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 05012e7b-e08f-4e35-9154-251558478659] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1530.663767] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1530.663955] env[65503]: DEBUG nova.compute.manager [-] [instance: 05012e7b-e08f-4e35-9154-251558478659] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1530.664061] env[65503]: DEBUG nova.network.neutron [-] [instance: 05012e7b-e08f-4e35-9154-251558478659] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1530.664303] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1530.664854] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1530.665142] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1530.737729] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
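The network-vif-plugged, network-changed, and (just below) network-vif-deleted events in this trace are delivered by Neutron through Nova's os-server-external-events API. The following is a hedged sketch of what such a notification looks like when posted by hand via openstacksdk's compute endpoint; the payload shape mirrors what Neutron's Nova notifier sends, but the call itself is illustrative, the cloud name is an assumed clouds.yaml entry, and the UUIDs are taken from this trace as examples.

    # Hedged sketch of the os-server-external-events call through which
    # Neutron delivers the network-vif-* events seen in this trace.
    # Cloud name and payload values are assumptions/examples only.
    import openstack

    conn = openstack.connect(cloud="devstack")
    resp = conn.compute.post(
        "/os-server-external-events",
        json={"events": [{
            "name": "network-vif-deleted",
            "server_uuid": "05012e7b-e08f-4e35-9154-251558478659",
            "tag": "a4356152-7462-46a7-b26d-051b9db1302c",
        }]},
    )
    print(resp.status_code)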
[ 1530.987283] env[65503]: DEBUG nova.compute.manager [req-6362b352-935f-46c9-8e7a-cb8b25911500 req-adf0a9c9-df45-4763-8192-1b65c4fc3632 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] Received event network-vif-deleted-a4356152-7462-46a7-b26d-051b9db1302c {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1530.987458] env[65503]: INFO nova.compute.manager [req-6362b352-935f-46c9-8e7a-cb8b25911500 req-adf0a9c9-df45-4763-8192-1b65c4fc3632 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] Neutron deleted interface a4356152-7462-46a7-b26d-051b9db1302c; detaching it from the instance and deleting it from the info cache [ 1530.987657] env[65503]: DEBUG nova.network.neutron [req-6362b352-935f-46c9-8e7a-cb8b25911500 req-adf0a9c9-df45-4763-8192-1b65c4fc3632 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1531.466929] env[65503]: DEBUG nova.network.neutron [-] [instance: 05012e7b-e08f-4e35-9154-251558478659] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1531.490852] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd7f3ab9-48c7-4fe0-9dac-ce54e398f5d4 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.504734] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ddff25-17dd-4374-bee4-cf8b3d4c2c67 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.535011] env[65503]: DEBUG nova.compute.manager [req-6362b352-935f-46c9-8e7a-cb8b25911500 req-adf0a9c9-df45-4763-8192-1b65c4fc3632 service nova] [instance: 05012e7b-e08f-4e35-9154-251558478659] Detach interface failed, port_id=a4356152-7462-46a7-b26d-051b9db1302c, reason: Instance 05012e7b-e08f-4e35-9154-251558478659 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1531.971053] env[65503]: INFO nova.compute.manager [-] [instance: 05012e7b-e08f-4e35-9154-251558478659] Took 1.31 seconds to deallocate network for instance. 
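Editor's note: the "Received event network-vif-deleted-..." record above is the compute manager consuming a notification that Neutron sent through Nova's os-server-external-events API after the port was deleted. The snippet below sketches the shape of that request; the endpoint URL and token are placeholders and the helper name is mine — in a real deployment Neutron's Nova notifier builds this from its own configuration and a Keystone token.

    import requests

    # Hypothetical values for illustration only.
    NOVA_URL = "http://nova-api.example.test/v2.1"
    TOKEN = "<keystone-token>"

    def send_vif_deleted(server_uuid, port_id):
        # Event body format matching the "network-vif-deleted-<port>"
        # event name seen in the log record above.
        body = {"events": [{"name": "network-vif-deleted",
                            "server_uuid": server_uuid,
                            "tag": port_id}]}
        resp = requests.post(f"{NOVA_URL}/os-server-external-events",
                             json=body,
                             headers={"X-Auth-Token": TOKEN})
        resp.raise_for_status()
        return resp.json()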
[ 1532.477022] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1532.477362] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1532.477611] env[65503]: DEBUG nova.objects.instance [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lazy-loading 'resources' on Instance uuid 05012e7b-e08f-4e35-9154-251558478659 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1533.036029] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9e3604-66ae-42cb-a183-ba5706ba83bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.043935] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3fe8046-9963-48f4-b1a2-97b3b3618d62 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.073917] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb9af9a-1175-4e79-95c4-a8331b4a9d9f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.081652] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a74450-a6f8-4bd2-9fd5-1904569e560f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.094862] env[65503]: DEBUG nova.compute.provider_tree [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1533.598052] env[65503]: DEBUG nova.scheduler.client.report [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1534.104073] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.627s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1534.122539] env[65503]: INFO nova.scheduler.client.report [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted allocations for instance 05012e7b-e08f-4e35-9154-251558478659 [ 1534.631079] env[65503]: DEBUG oslo_concurrency.lockutils [None req-8e4601e0-55cc-4eb8-a5ed-6e9c6c5c89f7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "05012e7b-e08f-4e35-9154-251558478659" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.590s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1536.073802] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1536.074165] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1536.576298] env[65503]: DEBUG nova.compute.manager [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Starting instance... 
{{(pid=65503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1537.100854] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1537.101166] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1537.102668] env[65503]: INFO nova.compute.claims [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1538.165035] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a312d234-5bb0-4fa2-9cc4-cd53a815a28a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.173078] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3953d2a1-824d-46cf-84a7-a77fabf72b05 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.203655] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47616ffc-d90a-48ae-9d18-6161be05f6ce {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.211299] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-548d7682-c347-40ca-adc4-79ad903de13b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.224503] env[65503]: DEBUG nova.compute.provider_tree [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1538.728056] env[65503]: DEBUG nova.scheduler.client.report [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1539.233617] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 
tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.132s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1539.234298] env[65503]: DEBUG nova.compute.manager [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Start building networks asynchronously for instance. {{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1539.739314] env[65503]: DEBUG nova.compute.utils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Using /dev/sd instead of None {{(pid=65503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1539.740801] env[65503]: DEBUG nova.compute.manager [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Allocating IP information in the background. {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1539.741054] env[65503]: DEBUG nova.network.neutron [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] allocate_for_instance() {{(pid=65503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1539.741768] env[65503]: WARNING neutronclient.v2_0.client [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1539.742223] env[65503]: WARNING neutronclient.v2_0.client [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1539.742796] env[65503]: WARNING openstack [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1539.743144] env[65503]: WARNING openstack [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1539.751216] env[65503]: DEBUG nova.compute.manager [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Start building block device mappings for instance. 
{{(pid=65503) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1539.799864] env[65503]: DEBUG nova.policy [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3299e99bda746c89cd71759d037fd52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62e7660e16774c408729de84ba5c7534', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65503) authorize /opt/stack/nova/nova/policy.py:192}} [ 1540.100896] env[65503]: DEBUG nova.network.neutron [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Successfully created port: 89e0bccc-bb9e-4366-a74f-e5a899941a4b {{(pid=65503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1540.760857] env[65503]: DEBUG nova.compute.manager [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Start spawning the instance on the hypervisor. {{(pid=65503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1540.787192] env[65503]: DEBUG nova.virt.hardware [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-14T15:45:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-14T15:45:12Z,direct_url=,disk_format='vmdk',id=d68ffece-ab91-4610-b535-fa1fb25ade93,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='e49f34d3bf1942dc9c4bf5ee4810f103',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-14T15:45:13Z,virtual_size=,visibility=), allow threads: False {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1540.787482] env[65503]: DEBUG nova.virt.hardware [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1540.787635] env[65503]: DEBUG nova.virt.hardware [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image limits 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1540.787823] env[65503]: DEBUG nova.virt.hardware [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Flavor pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1540.787963] env[65503]: DEBUG nova.virt.hardware 
[None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Image pref 0:0:0 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1540.788124] env[65503]: DEBUG nova.virt.hardware [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1540.788324] env[65503]: DEBUG nova.virt.hardware [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1540.788474] env[65503]: DEBUG nova.virt.hardware [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1540.788630] env[65503]: DEBUG nova.virt.hardware [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Got 1 possible topologies {{(pid=65503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1540.788782] env[65503]: DEBUG nova.virt.hardware [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1540.788942] env[65503]: DEBUG nova.virt.hardware [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1540.789863] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46be1ac9-d171-46cd-a176-8c16a90db184 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.798801] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c86bb0-8e23-4abb-94e9-dfb9d3de7038 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.513080] env[65503]: DEBUG nova.compute.manager [req-49045afd-4f44-47d5-a9f5-496f2889258a req-7295b828-1f20-4ac0-b2a1-97047c7ca8c1 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Received event network-vif-plugged-89e0bccc-bb9e-4366-a74f-e5a899941a4b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1541.513325] env[65503]: DEBUG oslo_concurrency.lockutils [req-49045afd-4f44-47d5-a9f5-496f2889258a req-7295b828-1f20-4ac0-b2a1-97047c7ca8c1 service nova] Acquiring lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1541.513527] env[65503]: DEBUG oslo_concurrency.lockutils [req-49045afd-4f44-47d5-a9f5-496f2889258a req-7295b828-1f20-4ac0-b2a1-97047c7ca8c1 service nova] Lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1541.513686] env[65503]: DEBUG oslo_concurrency.lockutils [req-49045afd-4f44-47d5-a9f5-496f2889258a req-7295b828-1f20-4ac0-b2a1-97047c7ca8c1 service nova] Lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1541.513844] env[65503]: DEBUG nova.compute.manager [req-49045afd-4f44-47d5-a9f5-496f2889258a req-7295b828-1f20-4ac0-b2a1-97047c7ca8c1 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] No waiting events found dispatching network-vif-plugged-89e0bccc-bb9e-4366-a74f-e5a899941a4b {{(pid=65503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1541.513998] env[65503]: WARNING nova.compute.manager [req-49045afd-4f44-47d5-a9f5-496f2889258a req-7295b828-1f20-4ac0-b2a1-97047c7ca8c1 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Received unexpected event network-vif-plugged-89e0bccc-bb9e-4366-a74f-e5a899941a4b for instance with vm_state building and task_state spawning. [ 1541.578516] env[65503]: DEBUG nova.network.neutron [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Successfully updated port: 89e0bccc-bb9e-4366-a74f-e5a899941a4b {{(pid=65503) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1542.081417] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "refresh_cache-d3d0d93b-f41a-468f-ab9c-b301c40301e7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.081656] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "refresh_cache-d3d0d93b-f41a-468f-ab9c-b301c40301e7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1542.081869] env[65503]: DEBUG nova.network.neutron [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1542.584573] env[65503]: WARNING openstack [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 
1542.584951] env[65503]: WARNING openstack [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1542.620062] env[65503]: DEBUG nova.network.neutron [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Instance cache missing network info. {{(pid=65503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3424}} [ 1542.639145] env[65503]: WARNING openstack [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1542.639536] env[65503]: WARNING openstack [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1542.699148] env[65503]: WARNING neutronclient.v2_0.client [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
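Editor's note: throughout this section the resource tracker serializes instance claims and usage updates on a single named lock — the "compute_resources" Acquiring/acquired/released lines emitted by oslo_concurrency.lockutils. The toy class below shows the same pattern using oslo.concurrency's public decorator (requires the oslo.concurrency package); the tracked fields and arithmetic are illustrative stand-ins, not Nova's ResourceTracker logic.

    from oslo_concurrency import lockutils

    class MiniResourceTracker:
        """Toy stand-in for the resource tracker, for illustration only."""

        def __init__(self, vcpus, memory_mb):
            self.free_vcpus = vcpus
            self.free_memory_mb = memory_mb

        @lockutils.synchronized('compute_resources')
        def instance_claim(self, flavor):
            # Runs under the same named lock the log shows being taken by
            # ResourceTracker.instance_claim.
            if flavor['vcpus'] > self.free_vcpus:
                raise RuntimeError('not enough vCPUs')
            self.free_vcpus -= flavor['vcpus']
            self.free_memory_mb -= flavor['memory_mb']

        @lockutils.synchronized('compute_resources')
        def update_usage(self, flavor):
            # Counterpart to ResourceTracker.update_usage on instance delete.
            self.free_vcpus += flavor['vcpus']
            self.free_memory_mb += flavor['memory_mb']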
[ 1542.699831] env[65503]: WARNING openstack [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1542.700294] env[65503]: WARNING openstack [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1542.781639] env[65503]: DEBUG nova.network.neutron [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Updating instance_info_cache with network_info: [{"id": "89e0bccc-bb9e-4366-a74f-e5a899941a4b", "address": "fa:16:3e:14:c8:00", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e0bccc-bb", "ovs_interfaceid": "89e0bccc-bb9e-4366-a74f-e5a899941a4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1543.285153] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "refresh_cache-d3d0d93b-f41a-468f-ab9c-b301c40301e7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1543.285572] env[65503]: DEBUG nova.compute.manager [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Instance network_info: |[{"id": "89e0bccc-bb9e-4366-a74f-e5a899941a4b", "address": "fa:16:3e:14:c8:00", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e0bccc-bb", "ovs_interfaceid": "89e0bccc-bb9e-4366-a74f-e5a899941a4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1543.286041] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:c8:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89e0bccc-bb9e-4366-a74f-e5a899941a4b', 'vif_model': 'vmxnet3'}] {{(pid=65503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1543.294110] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1543.294384] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Creating VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1543.294624] env[65503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-744597cd-0e03-4cc6-ac13-8b99bf35624e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.315525] env[65503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1543.315525] env[65503]: value = "task-4451160" [ 1543.315525] env[65503]: _type = "Task" [ 1543.315525] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.324156] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451160, 'name': CreateVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.549844] env[65503]: DEBUG nova.compute.manager [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Received event network-changed-89e0bccc-bb9e-4366-a74f-e5a899941a4b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1543.550074] env[65503]: DEBUG nova.compute.manager [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Refreshing instance network info cache due to event network-changed-89e0bccc-bb9e-4366-a74f-e5a899941a4b. 
{{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1543.550321] env[65503]: DEBUG oslo_concurrency.lockutils [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] Acquiring lock "refresh_cache-d3d0d93b-f41a-468f-ab9c-b301c40301e7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.550454] env[65503]: DEBUG oslo_concurrency.lockutils [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] Acquired lock "refresh_cache-d3d0d93b-f41a-468f-ab9c-b301c40301e7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1543.550683] env[65503]: DEBUG nova.network.neutron [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Refreshing network info cache for port 89e0bccc-bb9e-4366-a74f-e5a899941a4b {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2093}} [ 1543.825279] env[65503]: DEBUG oslo_vmware.api [-] Task: {'id': task-4451160, 'name': CreateVM_Task, 'duration_secs': 0.307694} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.825478] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Created VM on the ESX host {{(pid=65503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1543.825895] env[65503]: WARNING neutronclient.v2_0.client [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
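Editor's note: the instance_info_cache updates above are plain list-of-VIF structures (each VIF carries "id", "address", and a nested "network" with subnets and fixed IPs). When reading logs like these, it can help to pull the interesting fields out programmatically; the parser below follows the structure exactly as it appears in the cache entry for port 89e0bccc-bb9e-4366-a74f-e5a899941a4b, with only the fields needed for the example reproduced.

    def summarize_vifs(network_info):
        """Return (port_id, mac, [fixed_ips]) tuples from a network_info
        list shaped like the instance_info_cache entries in the log."""
        summary = []
        for vif in network_info:
            ips = [ip['address']
                   for subnet in vif['network']['subnets']
                   for ip in subnet['ips']]
            summary.append((vif['id'], vif['address'], ips))
        return summary

    # Example using the values shown in the cache update above:
    nw_info = [{"id": "89e0bccc-bb9e-4366-a74f-e5a899941a4b",
                "address": "fa:16:3e:14:c8:00",
                "network": {"subnets": [{"ips": [{"address": "192.168.128.9"}]}]}}]
    print(summarize_vifs(nw_info))
    # [('89e0bccc-bb9e-4366-a74f-e5a899941a4b', 'fa:16:3e:14:c8:00', ['192.168.128.9'])]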
[ 1543.826271] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.826418] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1543.826730] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1543.826981] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ee1691-ca27-4c63-a754-cf20404d49a5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.832815] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1543.832815] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]529d6c65-db35-77bd-a35b-b73e82c61810" [ 1543.832815] env[65503]: _type = "Task" [ 1543.832815] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.840946] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529d6c65-db35-77bd-a35b-b73e82c61810, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.054036] env[65503]: WARNING neutronclient.v2_0.client [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
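Editor's note: the lock names and SearchDatastore_Task calls above follow the image-cache layout visible in this log — the base image lives under [datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk and is later copied to [datastore2] <instance-uuid>/<instance-uuid>.vmdk (both paths appear verbatim in the CopyVirtualDisk records further down). The helpers below simply reproduce that naming convention as read off the log; the function names are mine and carry no significance beyond illustration.

    def cached_image_path(datastore, cache_dir, image_id):
        # e.g. "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk"
        return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"

    def instance_disk_path(datastore, instance_uuid):
        # e.g. "[datastore2] <instance-uuid>/<instance-uuid>.vmdk"
        return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    print(cached_image_path("datastore2", "devstack-image-cache_base",
                            "d68ffece-ab91-4610-b535-fa1fb25ade93"))
    print(instance_disk_path("datastore2", "d3d0d93b-f41a-468f-ab9c-b301c40301e7"))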
[ 1544.054782] env[65503]: WARNING openstack [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1544.055208] env[65503]: WARNING openstack [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1544.145794] env[65503]: WARNING openstack [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1544.146209] env[65503]: WARNING openstack [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1544.205505] env[65503]: WARNING neutronclient.v2_0.client [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1544.206246] env[65503]: WARNING openstack [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1544.206654] env[65503]: WARNING openstack [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1544.300489] env[65503]: DEBUG nova.network.neutron [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Updated VIF entry in instance network info cache for port 89e0bccc-bb9e-4366-a74f-e5a899941a4b. 
{{(pid=65503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3584}} [ 1544.300851] env[65503]: DEBUG nova.network.neutron [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Updating instance_info_cache with network_info: [{"id": "89e0bccc-bb9e-4366-a74f-e5a899941a4b", "address": "fa:16:3e:14:c8:00", "network": {"id": "f28daa82-49cf-4d2e-9cbc-5d47417ddfab", "bridge": "br-int", "label": "tempest-ServersTestJSON-231164045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e7660e16774c408729de84ba5c7534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e0bccc-bb", "ovs_interfaceid": "89e0bccc-bb9e-4366-a74f-e5a899941a4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1544.345654] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]529d6c65-db35-77bd-a35b-b73e82c61810, 'name': SearchDatastore_Task, 'duration_secs': 0.010117} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.346023] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1544.346307] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Processing image d68ffece-ab91-4610-b535-fa1fb25ade93 {{(pid=65503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1544.346571] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.346713] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1544.346913] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1544.347232] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-950e4241-8c5b-438b-9afa-461598865c60 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.356598] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1544.356786] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1544.357528] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b850673a-9191-432c-b0d0-139e04d5a7fb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.363186] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1544.363186] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]52415810-825e-aeb6-0c92-cb1ab1ec065d" [ 1544.363186] env[65503]: _type = "Task" [ 1544.363186] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.371277] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52415810-825e-aeb6-0c92-cb1ab1ec065d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.739944] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "0f2aec6a-692b-4fee-b0db-42e61146c4db" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1544.740251] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0f2aec6a-692b-4fee-b0db-42e61146c4db" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1544.740440] env[65503]: DEBUG nova.compute.manager [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1544.741707] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef0c78c-ce76-4d56-8e65-8e359b7834df {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.748675] env[65503]: DEBUG nova.compute.manager [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 1544.749251] env[65503]: DEBUG nova.objects.instance [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'flavor' on Instance uuid 0f2aec6a-692b-4fee-b0db-42e61146c4db 
{{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1544.803700] env[65503]: DEBUG oslo_concurrency.lockutils [req-079d936f-c268-400d-9a8b-d985bebf5d93 req-d5b6194b-f7d1-4674-8cba-29e29fd101f5 service nova] Releasing lock "refresh_cache-d3d0d93b-f41a-468f-ab9c-b301c40301e7" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1544.873809] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]52415810-825e-aeb6-0c92-cb1ab1ec065d, 'name': SearchDatastore_Task, 'duration_secs': 0.009682} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.874565] env[65503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63de1bac-32ab-4c68-b8a1-08496962abb7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.880378] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1544.880378] env[65503]: value = "session[52ca68d6-9baf-b27a-a70d-300f2615599e]5270c221-5997-8cdf-21f6-a6d75dc619a1" [ 1544.880378] env[65503]: _type = "Task" [ 1544.880378] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.888680] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5270c221-5997-8cdf-21f6-a6d75dc619a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.391927] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': session[52ca68d6-9baf-b27a-a70d-300f2615599e]5270c221-5997-8cdf-21f6-a6d75dc619a1, 'name': SearchDatastore_Task, 'duration_secs': 0.010095} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.392223] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1545.392476] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] d3d0d93b-f41a-468f-ab9c-b301c40301e7/d3d0d93b-f41a-468f-ab9c-b301c40301e7.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1545.392744] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb03eb6b-5ea1-4e22-8599-4ca3a02b0ec6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.399704] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1545.399704] env[65503]: value = "task-4451161" [ 1545.399704] env[65503]: _type = "Task" [ 1545.399704] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.408068] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451161, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.760863] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1545.761205] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4f4191c-42e7-444d-b911-2233e609f739 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.770909] env[65503]: DEBUG oslo_vmware.api [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1545.770909] env[65503]: value = "task-4451162" [ 1545.770909] env[65503]: _type = "Task" [ 1545.770909] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.781974] env[65503]: DEBUG oslo_vmware.api [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451162, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.910802] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451161, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.428547} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.911122] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d68ffece-ab91-4610-b535-fa1fb25ade93/d68ffece-ab91-4610-b535-fa1fb25ade93.vmdk to [datastore2] d3d0d93b-f41a-468f-ab9c-b301c40301e7/d3d0d93b-f41a-468f-ab9c-b301c40301e7.vmdk {{(pid=65503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1545.911339] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Extending root virtual disk to 1048576 {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1545.911595] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e423f60-75e7-4bb2-b575-55929a9d7dbe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.920522] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1545.920522] env[65503]: value = "task-4451163" [ 1545.920522] env[65503]: _type = "Task" [ 1545.920522] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.930193] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451163, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.281545] env[65503]: DEBUG oslo_vmware.api [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451162, 'name': PowerOffVM_Task, 'duration_secs': 0.188963} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.281725] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1546.281822] env[65503]: DEBUG nova.compute.manager [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1546.282602] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc1db29-7d3d-49f2-97e1-a8269c6ce645 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.431062] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451163, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066212} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.431062] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Extended root virtual disk {{(pid=65503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1546.431813] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0cfae4-bdad-46ec-8358-387923385b3b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.453911] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] d3d0d93b-f41a-468f-ab9c-b301c40301e7/d3d0d93b-f41a-468f-ab9c-b301c40301e7.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1546.454665] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49bc0c1f-21b9-4412-b6c1-81e8a39ff271 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.474647] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1546.474647] env[65503]: value = "task-4451164" [ 1546.474647] env[65503]: _type = "Task" [ 1546.474647] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.482704] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451164, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.796119] env[65503]: DEBUG oslo_concurrency.lockutils [None req-d9401094-11ad-456f-814d-fbce5a1bf1cb tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0f2aec6a-692b-4fee-b0db-42e61146c4db" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.055s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1546.984600] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451164, 'name': ReconfigVM_Task, 'duration_secs': 0.27188} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.984889] env[65503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Reconfigured VM instance instance-0000007f to attach disk [datastore2] d3d0d93b-f41a-468f-ab9c-b301c40301e7/d3d0d93b-f41a-468f-ab9c-b301c40301e7.vmdk or device None with type sparse {{(pid=65503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1546.985530] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed46b429-950d-4c22-adcd-3d1213d19c3a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.992823] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1546.992823] env[65503]: value = "task-4451165" [ 1546.992823] env[65503]: _type = "Task" [ 1546.992823] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.001818] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451165, 'name': Rename_Task} progress is 5%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.127814] env[65503]: DEBUG nova.objects.instance [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'flavor' on Instance uuid 0f2aec6a-692b-4fee-b0db-42e61146c4db {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1547.503142] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451165, 'name': Rename_Task, 'duration_secs': 0.149187} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.503510] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1547.503662] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10c0d271-98c0-4073-9bf9-ac975fc6f103 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.510586] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1547.510586] env[65503]: value = "task-4451166" [ 1547.510586] env[65503]: _type = "Task" [ 1547.510586] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.518674] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451166, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.632914] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.633188] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1547.633391] env[65503]: DEBUG nova.network.neutron [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1547.633615] env[65503]: DEBUG nova.objects.instance [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'info_cache' on Instance uuid 0f2aec6a-692b-4fee-b0db-42e61146c4db {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1548.020914] env[65503]: DEBUG oslo_vmware.api [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451166, 'name': PowerOnVM_Task, 'duration_secs': 0.442848} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.021165] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1548.021367] env[65503]: INFO nova.compute.manager [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Took 7.26 seconds to spawn the instance on the hypervisor. [ 1548.021541] env[65503]: DEBUG nova.compute.manager [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1548.022366] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aafa58d-37ec-4c64-bedb-beb556639cbd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.136704] env[65503]: DEBUG nova.objects.base [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Object Instance<0f2aec6a-692b-4fee-b0db-42e61146c4db> lazy-loaded attributes: flavor,info_cache {{(pid=65503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1548.541297] env[65503]: INFO nova.compute.manager [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Took 11.46 seconds to build instance. [ 1548.639323] env[65503]: WARNING neutronclient.v2_0.client [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
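[editor's note] The spawn sequence that completes above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven through oslo.vmware's session and task-polling API, which is what emits the "Waiting for the task", "progress is N%" and "completed successfully" lines. Below is a minimal sketch of that pattern outside Nova, under stated assumptions: the vCenter endpoint and credentials are placeholders, only the instance UUID is taken from this log, and the calls shown (VMwareAPISession, invoke_api, wait_for_task, vim_util.get_object_property) are used from memory of the oslo.vmware API rather than copied from Nova's driver.

```python
# Minimal sketch (not Nova's code) of the oslo.vmware session/task pattern
# behind the "Waiting for the task" / "progress is N%" lines above.
# Endpoint and credentials are placeholders; the UUID is from this log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.test',         # placeholder endpoint
    'administrator@vsphere.local',  # placeholder user
    'secret',                       # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

# Resolve the VM the same way the SearchIndex.FindAllByUuid calls in the
# log do (returns a list of matching VirtualMachine morefs).
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='d3d0d93b-f41a-468f-ab9c-b301c40301e7',
    vmSearch=True, instanceUuid=True)
vm_ref = vm_refs[0]

# Kick off a vSphere task and block until it finishes; wait_for_task()
# polls the task info and raises if the task ends in error.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task_ref)

# Power-state check analogous to the _get_power_state retrievals above.
print(session.invoke_api(vim_util, 'get_object_property',
                         session.vim, vm_ref, 'runtime.powerState'))
```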
[ 1548.640114] env[65503]: WARNING openstack [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1548.640668] env[65503]: WARNING openstack [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1548.765324] env[65503]: WARNING openstack [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1548.765735] env[65503]: WARNING openstack [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1548.833523] env[65503]: WARNING neutronclient.v2_0.client [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
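[editor's note] The "Acquiring lock" / "acquired by ... waited" / '"released" by ... held' lines that bracket operations such as do_stop_instance and the refresh_cache-&lt;uuid&gt; sections come from oslo.concurrency's lockutils. A small sketch of the two usual forms follows; the lock name and prefix are placeholders, not values from this log.

```python
# Sketch of the oslo.concurrency locking behind the
# 'Acquiring lock' / 'acquired by' / '"released" by' DEBUG lines.
# Lock name and prefix below are placeholders.
from oslo_concurrency import lockutils

synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('example-instance-uuid')
def stop_instance():
    # Runs only while the named lock is held; lockutils logs the acquire,
    # the time waited, and the time held when the lock is released.
    pass

# Equivalent context-manager form:
with lockutils.lock('example-instance-uuid', lock_file_prefix='nova-'):
    pass

stop_instance()
```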
[ 1548.834297] env[65503]: WARNING openstack [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1548.834641] env[65503]: WARNING openstack [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1548.926184] env[65503]: DEBUG nova.network.neutron [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Updating instance_info_cache with network_info: [{"id": "8c3efc5b-406f-42aa-8a85-06abcc142165", "address": "fa:16:3e:cd:22:05", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c3efc5b-40", "ovs_interfaceid": "8c3efc5b-406f-42aa-8a85-06abcc142165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1549.043346] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a027c80b-08f4-4bc7-a2d6-943c7cae2364 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.969s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1549.389202] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1549.389514] env[65503]: DEBUG oslo_concurrency.lockutils [None 
req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1549.389698] env[65503]: DEBUG nova.compute.manager [None req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1549.390706] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdd99a6-88af-4bc1-9c01-56309e431052 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.397970] env[65503]: DEBUG nova.compute.manager [None req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 1549.398574] env[65503]: DEBUG nova.objects.instance [None req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lazy-loading 'flavor' on Instance uuid d3d0d93b-f41a-468f-ab9c-b301c40301e7 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1549.428880] env[65503]: DEBUG oslo_concurrency.lockutils [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1550.405834] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1550.406299] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-617034c1-d0ca-4b87-93b5-bb992f2e1c51 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.415019] env[65503]: DEBUG oslo_vmware.api [None req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1550.415019] env[65503]: value = "task-4451167" [ 1550.415019] env[65503]: _type = "Task" [ 1550.415019] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.423999] env[65503]: DEBUG oslo_vmware.api [None req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451167, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.433819] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Powering on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1550.434165] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-baf6651f-58d3-4f5d-aadc-0a200e76980e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.443934] env[65503]: DEBUG oslo_vmware.api [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1550.443934] env[65503]: value = "task-4451168" [ 1550.443934] env[65503]: _type = "Task" [ 1550.443934] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.453647] env[65503]: DEBUG oslo_vmware.api [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451168, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.925497] env[65503]: DEBUG oslo_vmware.api [None req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451167, 'name': PowerOffVM_Task, 'duration_secs': 0.189781} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.925774] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1550.925977] env[65503]: DEBUG nova.compute.manager [None req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1550.926818] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a61811-ae85-4be2-85a2-0b38335ca8be {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.954274] env[65503]: DEBUG oslo_vmware.api [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451168, 'name': PowerOnVM_Task, 'duration_secs': 0.406476} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.954506] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Powered on the VM {{(pid=65503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1550.954691] env[65503]: DEBUG nova.compute.manager [None req-2fbf914d-8721-4ed6-a7cb-c198d0a5ca80 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1550.955512] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9947c736-0b6f-42cc-a235-ac58551aa757 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.439915] env[65503]: DEBUG oslo_concurrency.lockutils [None req-a8f76880-a226-424d-a907-cfce70358f9d tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.050s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1551.909016] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41157152-d380-4bd2-bd96-3c16e330917b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.916340] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-124dc9e1-43ac-4e8f-8528-766501d02e42 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Suspending the VM {{(pid=65503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1551.916586] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-7167f93e-be5f-41b3-bb57-14fb785ad3e6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.923528] env[65503]: DEBUG oslo_vmware.api [None req-124dc9e1-43ac-4e8f-8528-766501d02e42 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1551.923528] env[65503]: value = "task-4451169" [ 1551.923528] env[65503]: _type = "Task" [ 1551.923528] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.932263] env[65503]: DEBUG oslo_vmware.api [None req-124dc9e1-43ac-4e8f-8528-766501d02e42 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451169, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.068893] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1552.069136] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1552.069349] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1552.069542] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1552.069714] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1552.072118] env[65503]: INFO nova.compute.manager [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Terminating instance [ 1552.433351] env[65503]: DEBUG oslo_vmware.api [None req-124dc9e1-43ac-4e8f-8528-766501d02e42 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451169, 'name': SuspendVM_Task} progress is 70%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.576426] env[65503]: DEBUG nova.compute.manager [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Start destroying the instance on the hypervisor. 
{{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1552.576832] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1552.577608] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3808de-46d8-4d7b-bec9-5fbd29b2ce72 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.586716] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1552.586986] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-838aa939-041d-4f5b-a577-891198f98f36 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.651915] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1552.652202] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1552.652371] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleting the datastore file [datastore2] d3d0d93b-f41a-468f-ab9c-b301c40301e7 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1552.652651] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6454fba-18ad-415f-8d75-ecc094c60e3e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.660240] env[65503]: DEBUG oslo_vmware.api [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1552.660240] env[65503]: value = "task-4451171" [ 1552.660240] env[65503]: _type = "Task" [ 1552.660240] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.668714] env[65503]: DEBUG oslo_vmware.api [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451171, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.934760] env[65503]: DEBUG oslo_vmware.api [None req-124dc9e1-43ac-4e8f-8528-766501d02e42 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451169, 'name': SuspendVM_Task, 'duration_secs': 0.646673} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.935137] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-124dc9e1-43ac-4e8f-8528-766501d02e42 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Suspended the VM {{(pid=65503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1552.935326] env[65503]: DEBUG nova.compute.manager [None req-124dc9e1-43ac-4e8f-8528-766501d02e42 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1552.936122] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb0b94e-78e6-48f4-9594-5ec7eb47bede {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.169516] env[65503]: DEBUG oslo_vmware.api [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451171, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20473} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.169782] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1553.169973] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1553.170323] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1553.170507] env[65503]: INFO nova.compute.manager [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1553.170739] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1553.170953] env[65503]: DEBUG nova.compute.manager [-] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1553.171101] env[65503]: DEBUG nova.network.neutron [-] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1553.171328] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1553.171840] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1553.172104] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1553.206956] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1553.449512] env[65503]: DEBUG nova.compute.manager [req-976e33ce-8bce-4520-81c8-8657a3e8d005 req-14fa61b1-715d-4627-af7e-4fe892999d10 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Received event network-vif-deleted-89e0bccc-bb9e-4366-a74f-e5a899941a4b {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1553.449860] env[65503]: INFO nova.compute.manager [req-976e33ce-8bce-4520-81c8-8657a3e8d005 req-14fa61b1-715d-4627-af7e-4fe892999d10 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Neutron deleted interface 89e0bccc-bb9e-4366-a74f-e5a899941a4b; detaching it from the instance and deleting it from the info cache [ 1553.450193] env[65503]: DEBUG nova.network.neutron [req-976e33ce-8bce-4520-81c8-8657a3e8d005 req-14fa61b1-715d-4627-af7e-4fe892999d10 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1553.924753] env[65503]: DEBUG nova.network.neutron [-] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1553.954473] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3694c0f7-6d81-43f1-82a9-0629c76a2fa5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.965052] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57860aac-7033-4d45-9df2-c56215633992 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.993423] env[65503]: DEBUG nova.compute.manager [req-976e33ce-8bce-4520-81c8-8657a3e8d005 
req-14fa61b1-715d-4627-af7e-4fe892999d10 service nova] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Detach interface failed, port_id=89e0bccc-bb9e-4366-a74f-e5a899941a4b, reason: Instance d3d0d93b-f41a-468f-ab9c-b301c40301e7 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1554.295101] env[65503]: INFO nova.compute.manager [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Resuming [ 1554.295682] env[65503]: DEBUG nova.objects.instance [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'flavor' on Instance uuid 0f2aec6a-692b-4fee-b0db-42e61146c4db {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1554.426600] env[65503]: INFO nova.compute.manager [-] [instance: d3d0d93b-f41a-468f-ab9c-b301c40301e7] Took 1.26 seconds to deallocate network for instance. [ 1554.933134] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1554.933450] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1554.933648] env[65503]: DEBUG nova.objects.instance [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lazy-loading 'resources' on Instance uuid d3d0d93b-f41a-468f-ab9c-b301c40301e7 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1555.323867] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1555.490208] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ce5740-d367-4845-ace9-a03621a72f89 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.498746] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bd0a8f-71de-4adb-a6bc-4ebda08b7b87 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.528724] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f91c0d-c1fe-4374-8647-efbd72287e1d {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.536986] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a724b2d-95b3-4cc4-b01f-f1024ed1de6d 
{{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.550561] env[65503]: DEBUG nova.compute.provider_tree [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1555.808476] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.808711] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquired lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1555.808845] env[65503]: DEBUG nova.network.neutron [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Building network info cache for instance {{(pid=65503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2096}} [ 1556.053506] env[65503]: DEBUG nova.scheduler.client.report [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1556.311504] env[65503]: WARNING neutronclient.v2_0.client [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
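[editor's note] For reference, the consumable capacity Placement derives from the inventory payload reported just above follows capacity = (total - reserved) * allocation_ratio, while min_unit/max_unit/step_size only constrain the size of a single allocation (e.g. max_unit 16 caps any one instance at 16 VCPUs). A quick check with the exact figures from this log:

```python
# Consumable capacity implied by the inventory logged above:
# capacity = (total - reserved) * allocation_ratio
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} consumable units")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200
```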
[ 1556.312174] env[65503]: WARNING openstack [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1556.312531] env[65503]: WARNING openstack [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1556.323874] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1556.440534] env[65503]: WARNING openstack [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1556.440947] env[65503]: WARNING openstack [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1556.502292] env[65503]: WARNING neutronclient.v2_0.client [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
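[editor's note] The instance_info_cache payload logged for instance 0f2aec6a-692b-4fee-b0db-42e61146c4db (earlier in this section, and again just below once the cache is rebuilt) nests the fixed and floating addresses several levels deep. The following illustrative walk over a trimmed copy of that exact structure shows where each address lives; it is not Nova code, just a traversal of the logged data shape.

```python
# Trimmed copy of the network_info entry logged for instance 0f2aec6a...,
# reduced to the fields needed to pull out the addresses.
network_info = [{
    "id": "8c3efc5b-406f-42aa-8a85-06abcc142165",
    "address": "fa:16:3e:cd:22:05",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.13",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.143",
                                  "type": "floating"}],
            }],
        }],
    },
}]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            print(vif["id"], ip["address"],
                  [f["address"] for f in ip.get("floating_ips", [])])
# 8c3efc5b-406f-42aa-8a85-06abcc142165 192.168.128.13 ['10.180.180.143']
```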
[ 1556.503052] env[65503]: WARNING openstack [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1556.503462] env[65503]: WARNING openstack [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1556.558250] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.625s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1556.580813] env[65503]: INFO nova.scheduler.client.report [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted allocations for instance d3d0d93b-f41a-468f-ab9c-b301c40301e7 [ 1556.584466] env[65503]: DEBUG nova.network.neutron [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Updating instance_info_cache with network_info: [{"id": "8c3efc5b-406f-42aa-8a85-06abcc142165", "address": "fa:16:3e:cd:22:05", "network": {"id": "5268092e-fde1-4ba7-847a-2657cb32ed3a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-302445083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93906c603f7a4b18a34fc4b42fb6d6c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c3efc5b-40", "ovs_interfaceid": "8c3efc5b-406f-42aa-8a85-06abcc142165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1557.087430] env[65503]: DEBUG oslo_concurrency.lockutils [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Releasing lock "refresh_cache-0f2aec6a-692b-4fee-b0db-42e61146c4db" {{(pid=65503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1557.088452] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d575a8-ebcf-44bc-a5e6-74700e851911 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.094714] env[65503]: DEBUG oslo_concurrency.lockutils [None req-9f0ae120-6c10-4b14-9b1f-ad732d59ec74 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "d3d0d93b-f41a-468f-ab9c-b301c40301e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.025s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1557.098328] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Resuming the VM {{(pid=65503) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1557.098586] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00af58ef-6d1d-48b5-8498-1675af0eda5e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.105532] env[65503]: DEBUG oslo_vmware.api [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1557.105532] env[65503]: value = "task-4451172" [ 1557.105532] env[65503]: _type = "Task" [ 1557.105532] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.115891] env[65503]: DEBUG oslo_vmware.api [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451172, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.323800] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1557.618194] env[65503]: DEBUG oslo_vmware.api [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451172, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.856100] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1557.856100] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1557.856410] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1557.856605] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1557.856844] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1557.859369] env[65503]: INFO nova.compute.manager [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Terminating instance [ 1558.116546] env[65503]: DEBUG oslo_vmware.api [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451172, 'name': PowerOnVM_Task, 'duration_secs': 0.803167} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.117049] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Resumed the VM {{(pid=65503) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1558.117049] env[65503]: DEBUG nova.compute.manager [None req-1db2125c-bfa2-4c78-9dc8-1a215922a191 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Checking state {{(pid=65503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1558.117713] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d57d7d-5fde-4497-98b1-b8ae320ce5fe {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.324363] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1558.324579] env[65503]: DEBUG nova.compute.manager [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1558.363714] env[65503]: DEBUG nova.compute.manager [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1558.363954] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1558.365309] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2bcf08-88cd-4fa4-a491-de6c1bb6a68e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.373408] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1558.373629] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-501416d4-3d5d-471e-93ef-34754e65741b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.380253] env[65503]: DEBUG oslo_vmware.api [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1558.380253] env[65503]: value = "task-4451173" [ 1558.380253] env[65503]: _type = "Task" [ 1558.380253] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.389126] env[65503]: DEBUG oslo_vmware.api [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451173, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.890360] env[65503]: DEBUG oslo_vmware.api [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451173, 'name': PowerOffVM_Task, 'duration_secs': 0.224638} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.890584] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1558.890747] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1558.890994] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b4c632b-1136-4ba5-a320-126cac96ac3c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.952380] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1558.952590] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Deleting contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1558.952768] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleting the datastore file [datastore2] 53e5de8f-1a2a-4b17-a3cd-43888dc70be9 {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1558.953077] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62b171d0-2708-4639-aaac-a316a9348dd5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.961135] env[65503]: DEBUG oslo_vmware.api [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for the task: (returnval){ [ 1558.961135] env[65503]: value = "task-4451175" [ 1558.961135] env[65503]: _type = "Task" [ 1558.961135] env[65503]: } to complete. 
{{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.969219] env[65503]: DEBUG oslo_vmware.api [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451175, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.024923] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "0f2aec6a-692b-4fee-b0db-42e61146c4db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1559.025222] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0f2aec6a-692b-4fee-b0db-42e61146c4db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1559.025432] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "0f2aec6a-692b-4fee-b0db-42e61146c4db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1559.025592] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0f2aec6a-692b-4fee-b0db-42e61146c4db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1559.025759] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "0f2aec6a-692b-4fee-b0db-42e61146c4db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1559.027899] env[65503]: INFO nova.compute.manager [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Terminating instance [ 1559.472396] env[65503]: DEBUG oslo_vmware.api [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Task: {'id': task-4451175, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.401261} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.472781] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1559.472851] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Deleted contents of the VM from datastore datastore2 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1559.473032] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1559.473210] env[65503]: INFO nova.compute.manager [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1559.473441] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1559.473646] env[65503]: DEBUG nova.compute.manager [-] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1559.473741] env[65503]: DEBUG nova.network.neutron [-] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1559.473973] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1559.474513] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1559.474764] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1559.511013] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
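[editor's note] The recurring "Disabling service 'block-storage' ... no such option valid_interfaces in group [cinder]" warnings above come from openstacksdk probing a keystoneauth adapter option that was never registered for those config groups, so oslo.config raises NoSuchOptError and the SDK disables the service. A minimal illustrative sketch of how that exact error arises (the group/option access pattern here is an assumption for demonstration, not Nova's or the SDK's actual code path):

from oslo_config import cfg

# Register the [cinder] group but deliberately leave `valid_interfaces`
# unregistered, mirroring the situation openstacksdk runs into above.
conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))

try:
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    # Prints something like: no such option valid_interfaces in group [cinder]
    print(exc)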
[ 1559.531633] env[65503]: DEBUG nova.compute.manager [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Start destroying the instance on the hypervisor. {{(pid=65503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1559.531847] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Destroying instance {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1559.533200] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681f16d9-0ef0-4873-b1ec-aef3d7c08f70 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.541698] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Powering off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1559.541957] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1676fe5-799e-4405-94da-725ee986744a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.549881] env[65503]: DEBUG oslo_vmware.api [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1559.549881] env[65503]: value = "task-4451176" [ 1559.549881] env[65503]: _type = "Task" [ 1559.549881] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.559233] env[65503]: DEBUG oslo_vmware.api [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451176, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.842160] env[65503]: DEBUG nova.compute.manager [req-dfdd2591-d4d6-47f1-8692-0c987e434b12 req-6af73390-6515-4129-a965-f20e0de59bbe service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Received event network-vif-deleted-b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1559.842337] env[65503]: INFO nova.compute.manager [req-dfdd2591-d4d6-47f1-8692-0c987e434b12 req-6af73390-6515-4129-a965-f20e0de59bbe service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Neutron deleted interface b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea; detaching it from the instance and deleting it from the info cache [ 1559.842504] env[65503]: DEBUG nova.network.neutron [req-dfdd2591-d4d6-47f1-8692-0c987e434b12 req-6af73390-6515-4129-a965-f20e0de59bbe service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1560.060114] env[65503]: DEBUG oslo_vmware.api [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451176, 'name': PowerOffVM_Task, 'duration_secs': 0.19633} completed successfully. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.060346] env[65503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Powered off the VM {{(pid=65503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1560.060508] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Unregistering the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1560.060766] env[65503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3636c95-00a4-403f-9b40-6bb38043b4f9 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.131968] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Unregistered the VM {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1560.132231] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Deleting contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1560.132420] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleting the datastore file [datastore1] 0f2aec6a-692b-4fee-b0db-42e61146c4db {{(pid=65503) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1560.132690] env[65503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b11df98-c900-4de0-a236-3a6e13cb895c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.139733] env[65503]: DEBUG oslo_vmware.api [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for the task: (returnval){ [ 1560.139733] env[65503]: value = "task-4451178" [ 1560.139733] env[65503]: _type = "Task" [ 1560.139733] env[65503]: } to complete. {{(pid=65503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.148831] env[65503]: DEBUG oslo_vmware.api [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451178, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.323710] env[65503]: DEBUG nova.network.neutron [-] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1560.344676] env[65503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4478992c-516b-4381-b0d6-f26e67e01ae7 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.355184] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b0da57-4984-4dbc-9364-41731c8c391a {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.380761] env[65503]: DEBUG nova.compute.manager [req-dfdd2591-d4d6-47f1-8692-0c987e434b12 req-6af73390-6515-4129-a965-f20e0de59bbe service nova] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Detach interface failed, port_id=b1f52fdf-8ab5-48fc-9ddb-e1d2aa3abaea, reason: Instance 53e5de8f-1a2a-4b17-a3cd-43888dc70be9 could not be found. {{(pid=65503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1560.649426] env[65503]: DEBUG oslo_vmware.api [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Task: {'id': task-4451178, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167078} completed successfully. 
{{(pid=65503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.649828] env[65503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleted the datastore file {{(pid=65503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1560.649828] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Deleted contents of the VM from datastore datastore1 {{(pid=65503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1560.650023] env[65503]: DEBUG nova.virt.vmwareapi.vmops [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Instance destroyed {{(pid=65503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1560.650196] env[65503]: INFO nova.compute.manager [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1560.650425] env[65503]: DEBUG oslo.service.backend._eventlet.loopingcall [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/loopingcall.py:437}} [ 1560.650613] env[65503]: DEBUG nova.compute.manager [-] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Deallocating network for instance {{(pid=65503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1560.650707] env[65503]: DEBUG nova.network.neutron [-] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] deallocate_for_instance() {{(pid=65503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1886}} [ 1560.650948] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1560.651483] env[65503]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1560.651751] env[65503]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1560.696515] env[65503]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
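[editor's note] The PowerOffVM_Task, UnregisterVM, and DeleteDatastoreFile_Task sequences logged above all follow the same oslo.vmware pattern: invoke a vSphere task through the API session, then block on wait_for_task while it polls progress (the "progress is 0%/66% ... completed successfully" lines). A minimal sketch of that pattern, assuming placeholder connection values and a vm_ref obtained elsewhere (e.g. via the RetrievePropertiesEx property-collector calls in the log); this is illustrative, not Nova's vm_util implementation:

from oslo_vmware import api

# Placeholder host/credentials; the real values come from nova.conf's [vmware]
# section. Constructing the session connects and logs in immediately.
session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def power_off_and_unregister(vm_ref):
    # Start the vSphere task, then poll it to completion, as in the log above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
    # UnregisterVM is synchronous, so no task polling is needed.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)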
[ 1560.827212] env[65503]: INFO nova.compute.manager [-] [instance: 53e5de8f-1a2a-4b17-a3cd-43888dc70be9] Took 1.35 seconds to deallocate network for instance. [ 1561.318942] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1561.334423] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1561.334720] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1561.334940] env[65503]: DEBUG nova.objects.instance [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lazy-loading 'resources' on Instance uuid 53e5de8f-1a2a-4b17-a3cd-43888dc70be9 {{(pid=65503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1561.645924] env[65503]: DEBUG nova.network.neutron [-] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Updating instance_info_cache with network_info: [] {{(pid=65503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1561.867626] env[65503]: DEBUG nova.compute.manager [req-b89d8e40-f7bb-477d-83f4-95ebb2369a46 req-552f680c-9ab9-46b5-8ec7-edce3db7479b service nova] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Received event network-vif-deleted-8c3efc5b-406f-42aa-8a85-06abcc142165 {{(pid=65503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1561.885058] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae53d2ca-c3a1-476e-9e5c-b98cf6bda09c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.893546] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993250c0-8c28-4492-9b68-70052ce2f55b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.924852] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6645e7-6c21-4825-acf5-1fda0dc5bbe5 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.932421] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8975c375-6409-4ef7-ae89-1354207c378e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.947701] env[65503]: DEBUG nova.compute.provider_tree [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed in ProviderTree 
for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1562.149169] env[65503]: INFO nova.compute.manager [-] [instance: 0f2aec6a-692b-4fee-b0db-42e61146c4db] Took 1.50 seconds to deallocate network for instance. [ 1562.340183] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1562.340183] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1562.450684] env[65503]: DEBUG nova.scheduler.client.report [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1562.657079] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1562.833838] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1562.956023] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1562.957881] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.301s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1562.958122] env[65503]: DEBUG nova.objects.instance [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lazy-loading 'resources' on Instance uuid 0f2aec6a-692b-4fee-b0db-42e61146c4db {{(pid=65503) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1562.973945] env[65503]: INFO nova.scheduler.client.report [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Deleted allocations for instance 53e5de8f-1a2a-4b17-a3cd-43888dc70be9 [ 1563.481042] env[65503]: DEBUG oslo_concurrency.lockutils [None req-0b66c7f2-8a67-4b56-bae2-e90d2b6540e7 tempest-ServersTestJSON-1867714447 tempest-ServersTestJSON-1867714447-project-member] Lock "53e5de8f-1a2a-4b17-a3cd-43888dc70be9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.625s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1563.491465] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d77972a-0ef2-4318-859f-555e4ec198bf {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.500209] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23cbd0cd-4393-4340-9f17-3ebb2984909e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.534147] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2226ff8b-3a26-4e7b-a083-c15a49e834d6 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.543250] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489e53f3-5302-4d0d-872b-ac8763fa89bb {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.559530] env[65503]: DEBUG nova.compute.provider_tree [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1564.062689] env[65503]: DEBUG nova.scheduler.client.report [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1564.567514] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.609s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1564.569976] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" 
acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.736s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1564.570069] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1564.570246] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1564.571149] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d990ba3-4f32-4d6d-963c-fc80f64aac9b {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.580784] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc3a3a6-c0ec-4639-b1e4-5a1c5977124e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.596605] env[65503]: INFO nova.scheduler.client.report [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Deleted allocations for instance 0f2aec6a-692b-4fee-b0db-42e61146c4db [ 1564.598140] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e2d273-e56f-4eab-97ef-3e942d188365 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.608310] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51c09b2-e9c0-4da7-b8a0-7deeebe5832e {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.639430] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180342MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1564.639722] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1564.639722] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1565.107915] env[65503]: DEBUG oslo_concurrency.lockutils [None req-3abf3623-1bf5-4be3-8ea9-ae637c39c103 tempest-ServerActionsTestJSON-130133197 tempest-ServerActionsTestJSON-130133197-project-member] Lock 
"0f2aec6a-692b-4fee-b0db-42e61146c4db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.083s {{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1565.659706] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1565.659893] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] stats={'failed_builds': '0'} {{(pid=65503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1565.674795] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae832541-d22d-4caa-b1b2-364392e82e2f {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.685091] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fbf61c-8212-436c-8ea4-88cea6a16b89 {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.717022] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5ba502-948c-4451-a145-6512377d84bd {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.726319] env[65503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ae9d2d-fa50-4800-8333-e7d7a27d8a2c {{(pid=65503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.740709] env[65503]: DEBUG nova.compute.provider_tree [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed in ProviderTree for provider: 988ff85a-1d12-41bb-a369-e298e8491ca1 {{(pid=65503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1566.243709] env[65503]: DEBUG nova.scheduler.client.report [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Inventory has not changed for provider 988ff85a-1d12-41bb-a369-e298e8491ca1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1566.753039] env[65503]: DEBUG nova.compute.resource_tracker [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1566.753039] env[65503]: DEBUG oslo_concurrency.lockutils [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.113s 
{{(pid=65503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1569.753701] env[65503]: DEBUG oslo_service.periodic_task [None req-78bda7e5-172c-47ab-8eea-d42c251b5242 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
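[editor's note] The 'Acquiring lock "compute_resources" ... acquired ... "released" ... held N.NNNs' DEBUG lines that recur through this section are emitted by oslo.concurrency's lock wrapper (the `inner` function in lockutils.py referenced in each entry), which Nova's resource tracker uses to serialize usage updates. A minimal illustrative sketch of that locking pattern; the function name below is invented for the example and is not Nova's actual method:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs with the named in-process lock held; lockutils logs the
    # acquire/wait/hold timings seen in the DEBUG lines above.
    pass

update_usage()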